A single dataset row describing the notebook `notebooks/FFNN_retrain_top.ipynb` from the repository `NGrech/plant-leaf-diseases-identification`; the `cells` field, rendered after the table, contains the notebook itself.

| Field | Type | Value |
|---|---|---|
| hexsha | stringlengths 40–40 | 4a62330d935b347c257944531afc7d479a88d6e3 |
| size | int64 6–14.9M | 246,734 |
| ext | stringclasses (1 value) | ipynb |
| lang | stringclasses (1 value) | Jupyter Notebook |
| max_stars_repo_path | stringlengths 6–260 | notebooks/FFNN_retrain_top.ipynb |
| max_stars_repo_name | stringlengths 6–119 | NGrech/plant-leaf-diseases-identification |
| max_stars_repo_head_hexsha | stringlengths 40–41 | d7d3ed0d5a20500efc7e6b3434571fb32fb7279a |
| max_stars_repo_licenses | list | ["MIT"] |
| max_stars_count | int64 1–191k, nullable | 1 |
| max_stars_repo_stars_event_min_datetime | stringlengths 24–24, nullable | 2021-11-21T11:49:16.000Z |
| max_stars_repo_stars_event_max_datetime | stringlengths 24–24, nullable | 2021-11-21T11:49:16.000Z |
| max_issues_repo_path | stringlengths 6–260 | notebooks/FFNN_retrain_top.ipynb |
| max_issues_repo_name | stringlengths 6–119 | NGrech/plant-leaf-diseases-identification |
| max_issues_repo_head_hexsha | stringlengths 40–41 | d7d3ed0d5a20500efc7e6b3434571fb32fb7279a |
| max_issues_repo_licenses | list | ["MIT"] |
| max_issues_count | int64 1–67k, nullable | null |
| max_issues_repo_issues_event_min_datetime | stringlengths 24–24, nullable | null |
| max_issues_repo_issues_event_max_datetime | stringlengths 24–24, nullable | null |
| max_forks_repo_path | stringlengths 6–260 | notebooks/FFNN_retrain_top.ipynb |
| max_forks_repo_name | stringlengths 6–119 | NGrech/plant-leaf-diseases-identification |
| max_forks_repo_head_hexsha | stringlengths 40–41 | d7d3ed0d5a20500efc7e6b3434571fb32fb7279a |
| max_forks_repo_licenses | list | ["MIT"] |
| max_forks_count | int64 1–105k, nullable | null |
| max_forks_repo_forks_event_min_datetime | stringlengths 24–24, nullable | null |
| max_forks_repo_forks_event_max_datetime | stringlengths 24–24, nullable | null |
| avg_line_length | float64 2–1.04M | 65.481423 |
| max_line_length | int64 2–11.2M | 1,312 |
| alphanum_fraction | float64 0–1 | 0.636945 |
| cells | list | rendered below |
| cell_types | list | … |
| cell_type_groups | list | … |
# Retraining of the top-performing FFNN

## Imports
```python
# General imports
import sys
import os
sys.path.insert(1, os.path.join(os.pardir, 'src'))
from itertools import product

# Data imports
import cv2
import torch
import mlflow
import numpy as np
from mlflow.tracking.client import MlflowClient
from torchvision import datasets, transforms

# Homebrew imports
import model
from utils import one_hot_encode_index
from optimizers import Adam
from activations import Softmax, ReLU
from layers import Dropout, LinearLayer
from loss import CategoricalCrossEntropyLoss

# pytorch imports
from torch import nn, cuda, optim, no_grad
import torch.nn.functional as F
from torchvision import transforms

## TESTING
import importlib
importlib.reload(model)
##
```
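The MLflow queries in the next section assume a tracking store that already holds the tuning runs under experiment id `8`. A minimal sketch for confirming that store is reachable before querying; the local `./mlruns` default is an assumption, so adjust `mlflow.set_tracking_uri` if the runs were logged elsewhere:

```python
# Illustrative sanity check, not part of the original notebook:
# confirm the experiment queried below ("8") is visible on the current tracking store.
# mlflow.set_tracking_uri("file:./mlruns")  # assumption: point this at wherever the runs live
exp = MlflowClient().get_experiment("8")
print(exp.name, exp.lifecycle_stage)
```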
## Finding best runs
```python
# Query the best-performing FFNN run from each framework (homebrew and pytorch)
query = "params.data_split = '90/10' and params.type = 'FFNN' and params.framework = 'homebrew'"
hb_runs = MlflowClient().search_runs(
    experiment_ids="8",
    filter_string=query,
    max_results=1,
    order_by=["metrics.validation_accuracy DESC"]
)

query = "params.data_split = '90/10' and params.type = 'FFNN' and params.framework = 'pytorch'"
pt_runs = MlflowClient().search_runs(
    experiment_ids="8",
    filter_string=query,
    max_results=1,
    order_by=["metrics.validation_accuracy DESC"]
)
```
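Each query returns at most one `Run` object, ordered by validation accuracy, and the retraining cell further down rebuilds its configuration from `hb_runs[0].data.params`. A small, illustrative peek at what the winning homebrew run carries, assuming the run logged a `validation_accuracy` metric as the `order_by` clause implies:

```python
# Illustrative only: inspect the best homebrew run before reusing its hyperparameters.
if hb_runs:
    best = hb_runs[0]
    print("run_id:", best.info.run_id)
    print("validation_accuracy:", best.data.metrics.get("validation_accuracy"))
    for name, value in best.data.params.items():  # same dict the retraining cell reads
        print(f"  {name} = {value}")
```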
## Setup data loaders
```python
train_transforms = transforms.Compose([transforms.RandomRotation(30),
                                       transforms.RandomResizedCrop(32),
                                       transforms.RandomHorizontalFlip(),
                                       transforms.Grayscale(num_output_channels=1),
                                       transforms.ToTensor(),
                                       transforms.Normalize([0.5], [0.5])
                                       ])

test_transforms = transforms.Compose([transforms.Resize(33),
                                      transforms.CenterCrop(32),
                                      transforms.Grayscale(num_output_channels=1),
                                      transforms.ToTensor(),
                                      transforms.Normalize([0.5], [0.5])
                                      ])

# setting up data loaders
data_dir = os.path.join(os.pardir, 'data', 'Plant_leave_diseases_32')

train_data = datasets.ImageFolder(os.path.join(data_dir, 'train'), transform=train_transforms)
test_data = datasets.ImageFolder(os.path.join(data_dir, 'validation'), transform=test_transforms)
```
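Both transform pipelines end in single-channel 32×32 tensors, which matches the `LinearLayer(32*32, 1024)` input of the network retrained below. A quick, illustrative shape check; it builds a throwaway loader with the same batch size of 64 used later:

```python
# Illustrative shape check: the homebrew model consumes flattened 1024-feature vectors
# (flatten_input=True in the training call below).
check_loader = torch.utils.data.DataLoader(train_data, batch_size=64, shuffle=True)
images, labels = next(iter(check_loader))
print(images.shape)                           # e.g. torch.Size([64, 1, 32, 32])
print(images.view(images.size(0), -1).shape)  # e.g. torch.Size([64, 1024])
```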
## Training 'Homebrew' models
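The cell below rebuilds the winning run's hyperparameters from MLflow and retrains a 1024-512-39 fully connected stack with dropout, Adam with decay, and early stopping. As a rough back-of-envelope count (assuming only the linear layers defined in that cell carry parameters), the network has about 1.6 million weights and biases:

```python
# Back-of-envelope parameter count for the 32*32 -> 1024 -> 512 -> 39 stack defined below.
sizes = [32 * 32, 1024, 512, 39]
n_params = sum(n_in * n_out + n_out for n_in, n_out in zip(sizes, sizes[1:]))
print(n_params)  # 1594407
```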
```python
# Getting Configs
par = hb_runs[0].data.params
config = {'data_split': par['data_split'],
          'decay': np.float64(par['decay']),
          'dropout': np.float64(par['dropout']),
          'framework': par['framework'],
          'learning_rate': np.float64(par['learning_rate']),
          'max_epochs': int(par['max_epochs']),
          'resolution': int(par['resolution']),
          'type': par['type']}

mlflow.set_experiment("Plant Leaf Disease")

train_loader = torch.utils.data.DataLoader(train_data, batch_size=64, shuffle=True)
validation_loader = torch.utils.data.DataLoader(test_data, batch_size=64, shuffle=True)

# initialize model
mdl = model.Model(Adam(learning_rate=config['learning_rate'], decay=config['decay']),
                  CategoricalCrossEntropyLoss())

# Config early stop
mdl.add_early_stop(25)

# save config
mdl.set_save_config(model_name='FFNN_top_homebrew', save_path=os.path.join('models'))

# Defining architecture
mdl.set_sequence([
    LinearLayer(32*32, 1024),
    ReLU(),
    Dropout(config['dropout']),
    LinearLayer(1024, 512),
    ReLU(),
    Dropout(config['dropout']),
    LinearLayer(512, 39),
    Softmax()
])


with mlflow.start_run():
    mlflow.log_params(config)
    mdl.train_with_loader(train_loader, epochs=config['max_epochs'], validation_loader=validation_loader, cls_count=39, flatten_input=True)
```

Output:
"=== Epoch: 1 ===\nStep: 0/865, accuracy0.047, loss4.201, learning rate 0.0010000 \nStep: 100/865, accuracy0.078, loss3.362, learning rate 0.0009901 \nStep: 200/865, accuracy0.234, loss3.145, learning rate 0.0009804 \nStep: 300/865, accuracy0.141, loss3.415, learning rate 0.0009709 \nStep: 400/865, accuracy0.203, loss3.305, learning rate 0.0009615 \nStep: 500/865, accuracy0.109, loss3.499, learning rate 0.0009524 \nStep: 600/865, accuracy0.062, loss3.463, learning rate 0.0009434 \nStep: 700/865, accuracy0.109, loss3.182, learning rate 0.0009346 \nStep: 800/865, accuracy0.156, loss3.321, learning rate 0.0009259 \nStep: 864/865, accuracy0.065, loss3.521, learning rate 0.0009205 \nEpoch: 1/200, accuracy0.149, loss3.305, learning rate 0.001\nEstimated reamining runtime: 7:47:15.439634\n--Validation--\nValidation : Accuracy: 0.230, Loss: 2.935\n=== Epoch: 2 ===\nStep: 0/865, accuracy0.094, loss3.410, learning rate 0.0009204 \nStep: 100/865, accuracy0.203, loss3.033, learning rate 0.0009120 \nStep: 200/865, accuracy0.078, loss3.519, learning rate 0.0009038 \nStep: 300/865, accuracy0.109, loss3.340, learning rate 0.0008957 \nStep: 400/865, accuracy0.203, loss3.141, learning rate 0.0008877 \nStep: 500/865, accuracy0.188, loss3.076, learning rate 0.0008799 \nStep: 600/865, accuracy0.219, loss3.010, learning rate 0.0008722 \nStep: 700/865, accuracy0.156, loss3.078, learning rate 0.0008647 \nStep: 800/865, accuracy0.109, loss3.380, learning rate 0.0008573 \nStep: 864/865, accuracy0.097, loss3.318, learning rate 0.0008526 \nEpoch: 2/200, accuracy0.175, loss3.163, learning rate 0.001\nEstimated reamining runtime: 7:23:07.909794\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.251, Loss: 2.786\n=== Epoch: 3 ===\nStep: 0/865, accuracy0.188, loss3.135, learning rate 0.0008525 \nStep: 100/865, accuracy0.234, loss3.098, learning rate 0.0008453 \nStep: 200/865, accuracy0.188, loss3.167, learning rate 0.0008382 \nStep: 300/865, accuracy0.219, loss3.027, learning rate 0.0008313 \nStep: 400/865, accuracy0.094, loss3.220, learning rate 0.0008244 \nStep: 500/865, accuracy0.172, loss3.237, learning rate 0.0008177 \nStep: 600/865, accuracy0.172, loss3.264, learning rate 0.0008110 \nStep: 700/865, accuracy0.203, loss3.024, learning rate 0.0008045 \nStep: 800/865, accuracy0.125, loss3.002, learning rate 0.0007981 \nStep: 864/865, accuracy0.323, loss2.604, learning rate 0.0007940 \nEpoch: 3/200, accuracy0.185, loss3.112, learning rate 0.001\nEstimated reamining runtime: 7:15:22.022168\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.261, Loss: 2.748\n=== Epoch: 4 ===\nStep: 0/865, accuracy0.188, loss3.080, learning rate 0.0007940 \nStep: 100/865, accuracy0.172, loss2.919, learning rate 0.0007877 \nStep: 200/865, accuracy0.141, loss3.137, learning rate 0.0007816 \nStep: 300/865, accuracy0.203, loss2.999, learning rate 0.0007755 \nStep: 400/865, accuracy0.172, loss3.235, learning rate 0.0007695 \nStep: 500/865, accuracy0.234, loss3.002, learning rate 0.0007637 \nStep: 600/865, accuracy0.219, loss2.897, learning rate 0.0007579 \nStep: 700/865, accuracy0.156, loss3.148, learning rate 0.0007522 \nStep: 800/865, accuracy0.156, loss3.160, learning rate 0.0007465 \nStep: 864/865, accuracy0.226, loss3.008, learning rate 0.0007430 \nEpoch: 4/200, accuracy0.191, loss3.064, learning rate 0.001\nEstimated reamining runtime: 7:08:32.849545\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.265, Loss: 2.709\n=== Epoch: 5 ===\nStep: 0/865, accuracy0.266, loss2.818, learning rate 0.0007429 \nStep: 100/865, accuracy0.219, loss3.086, learning rate 0.0007375 \nStep: 200/865, accuracy0.250, loss2.945, learning rate 0.0007321 \nStep: 300/865, accuracy0.281, loss2.960, learning rate 0.0007267 \nStep: 400/865, accuracy0.188, loss3.197, learning rate 0.0007215 \nStep: 500/865, accuracy0.281, loss2.885, learning rate 0.0007163 \nStep: 600/865, accuracy0.234, loss2.849, learning rate 0.0007112 \nStep: 700/865, accuracy0.203, loss3.009, learning rate 0.0007062 \nStep: 800/865, accuracy0.234, loss2.894, learning rate 0.0007013 \nStep: 864/865, accuracy0.258, loss3.001, learning rate 0.0006981 \nEpoch: 5/200, accuracy0.198, loss3.035, learning rate 0.001\nEstimated reamining runtime: 7:04:29.800472\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.281, Loss: 2.650\n=== Epoch: 6 ===\nStep: 0/865, accuracy0.203, loss3.003, learning rate 0.0006981 \nStep: 100/865, accuracy0.219, loss2.896, learning rate 0.0006932 \nStep: 200/865, accuracy0.234, loss3.039, learning rate 0.0006885 \nStep: 300/865, accuracy0.250, loss2.822, learning rate 0.0006838 \nStep: 400/865, accuracy0.234, loss2.932, learning rate 0.0006791 \nStep: 500/865, accuracy0.125, loss3.022, learning rate 0.0006745 \nStep: 600/865, accuracy0.188, loss2.934, learning rate 0.0006700 \nStep: 700/865, accuracy0.172, loss3.043, learning rate 0.0006656 \nStep: 800/865, accuracy0.250, loss2.849, learning rate 0.0006612 \nStep: 864/865, accuracy0.065, loss3.100, learning rate 0.0006584 \nEpoch: 6/200, accuracy0.204, loss3.001, learning rate 0.001\nEstimated reamining runtime: 6:58:25.142220\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.285, Loss: 2.608\n=== Epoch: 7 ===\nStep: 0/865, accuracy0.297, loss2.818, learning rate 0.0006583 \nStep: 100/865, accuracy0.250, loss2.912, learning rate 0.0006540 \nStep: 200/865, accuracy0.156, loss3.118, learning rate 0.0006498 \nStep: 300/865, accuracy0.141, loss3.004, learning rate 0.0006456 \nStep: 400/865, accuracy0.141, loss3.152, learning rate 0.0006414 \nStep: 500/865, accuracy0.156, loss3.091, learning rate 0.0006373 \nStep: 600/865, accuracy0.219, loss3.063, learning rate 0.0006333 \nStep: 700/865, accuracy0.219, loss2.921, learning rate 0.0006293 \nStep: 800/865, accuracy0.234, loss2.967, learning rate 0.0006254 \nStep: 864/865, accuracy0.258, loss2.911, learning rate 0.0006229 \nEpoch: 7/200, accuracy0.212, loss2.964, learning rate 0.001\nEstimated reamining runtime: 6:48:29.069177\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.296, Loss: 2.559\n=== Epoch: 8 ===\nStep: 0/865, accuracy0.250, loss3.188, learning rate 0.0006229 \nStep: 100/865, accuracy0.109, loss3.078, learning rate 0.0006190 \nStep: 200/865, accuracy0.141, loss3.165, learning rate 0.0006152 \nStep: 300/865, accuracy0.266, loss2.667, learning rate 0.0006114 \nStep: 400/865, accuracy0.188, loss2.994, learning rate 0.0006077 \nStep: 500/865, accuracy0.188, loss3.130, learning rate 0.0006040 \nStep: 600/865, accuracy0.281, loss2.700, learning rate 0.0006004 \nStep: 700/865, accuracy0.266, loss2.773, learning rate 0.0005968 \nStep: 800/865, accuracy0.156, loss3.015, learning rate 0.0005933 \nStep: 864/865, accuracy0.194, loss3.324, learning rate 0.0005911 \nEpoch: 8/200, accuracy0.213, loss2.950, learning rate 0.001\nEstimated reamining runtime: 6:41:10.765984\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.326, Loss: 2.503\n=== Epoch: 9 ===\nStep: 0/865, accuracy0.328, loss2.752, learning rate 0.0005910 \nStep: 100/865, accuracy0.281, loss2.784, learning rate 0.0005875 \nStep: 200/865, accuracy0.219, loss2.927, learning rate 0.0005841 \nStep: 300/865, accuracy0.188, loss3.014, learning rate 0.0005807 \nStep: 400/865, accuracy0.219, loss2.764, learning rate 0.0005774 \nStep: 500/865, accuracy0.188, loss2.783, learning rate 0.0005741 \nStep: 600/865, accuracy0.109, loss3.016, learning rate 0.0005708 \nStep: 700/865, accuracy0.219, loss2.842, learning rate 0.0005675 \nStep: 800/865, accuracy0.125, loss3.221, learning rate 0.0005643 \nStep: 864/865, accuracy0.129, loss2.939, learning rate 0.0005623 \nEpoch: 9/200, accuracy0.217, loss2.925, learning rate 0.001\nEstimated reamining runtime: 6:35:25.829850\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.327, Loss: 2.483\n=== Epoch: 10 ===\nStep: 0/865, accuracy0.125, loss3.201, learning rate 0.0005623 \nStep: 100/865, accuracy0.125, loss3.371, learning rate 0.0005591 \nStep: 200/865, accuracy0.188, loss2.798, learning rate 0.0005560 \nStep: 300/865, accuracy0.156, loss3.173, learning rate 0.0005529 \nStep: 400/865, accuracy0.203, loss3.079, learning rate 0.0005499 \nStep: 500/865, accuracy0.297, loss2.721, learning rate 0.0005469 \nStep: 600/865, accuracy0.203, loss2.969, learning rate 0.0005439 \nStep: 700/865, accuracy0.328, loss2.938, learning rate 0.0005410 \nStep: 800/865, accuracy0.188, loss2.790, learning rate 0.0005381 \nStep: 864/865, accuracy0.194, loss3.143, learning rate 0.0005362 \nEpoch: 10/200, accuracy0.222, loss2.904, learning rate 0.001\nEstimated reamining runtime: 6:30:29.475517\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.324, Loss: 2.473\n=== Epoch: 11 ===\nStep: 0/865, accuracy0.188, loss3.157, learning rate 0.0005362 \nStep: 100/865, accuracy0.203, loss2.962, learning rate 0.0005333 \nStep: 200/865, accuracy0.219, loss2.650, learning rate 0.0005305 \nStep: 300/865, accuracy0.266, loss2.869, learning rate 0.0005277 \nStep: 400/865, accuracy0.234, loss2.872, learning rate 0.0005249 \nStep: 500/865, accuracy0.234, loss2.906, learning rate 0.0005222 \nStep: 600/865, accuracy0.312, loss2.606, learning rate 0.0005195 \nStep: 700/865, accuracy0.234, loss3.204, learning rate 0.0005168 \nStep: 800/865, accuracy0.266, loss2.674, learning rate 0.0005141 \nStep: 864/865, accuracy0.258, loss3.084, learning rate 0.0005125 \nEpoch: 11/200, accuracy0.227, loss2.889, learning rate 0.001\nEstimated reamining runtime: 6:26:56.288918\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.338, Loss: 2.454\n=== Epoch: 12 ===\nStep: 0/865, accuracy0.250, loss2.811, learning rate 0.0005124 \nStep: 100/865, accuracy0.281, loss2.821, learning rate 0.0005098 \nStep: 200/865, accuracy0.266, loss2.794, learning rate 0.0005072 \nStep: 300/865, accuracy0.266, loss2.700, learning rate 0.0005047 \nStep: 400/865, accuracy0.188, loss2.990, learning rate 0.0005021 \nStep: 500/865, accuracy0.297, loss2.740, learning rate 0.0004996 \nStep: 600/865, accuracy0.281, loss2.777, learning rate 0.0004971 \nStep: 700/865, accuracy0.266, loss2.500, learning rate 0.0004947 \nStep: 800/865, accuracy0.266, loss2.622, learning rate 0.0004922 \nStep: 864/865, accuracy0.323, loss3.059, learning rate 0.0004907 \nEpoch: 12/200, accuracy0.229, loss2.867, learning rate 0.000\nEstimated reamining runtime: 6:24:09.521176\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.340, Loss: 2.396\n=== Epoch: 13 ===\nStep: 0/865, accuracy0.344, loss2.504, learning rate 0.0004907 \nStep: 100/865, accuracy0.312, loss2.578, learning rate 0.0004883 \nStep: 200/865, accuracy0.297, loss2.786, learning rate 0.0004859 \nStep: 300/865, accuracy0.156, loss3.081, learning rate 0.0004836 \nStep: 400/865, accuracy0.281, loss2.721, learning rate 0.0004812 \nStep: 500/865, accuracy0.219, loss2.807, learning rate 0.0004789 \nStep: 600/865, accuracy0.203, loss2.922, learning rate 0.0004766 \nStep: 700/865, accuracy0.297, loss2.725, learning rate 0.0004744 \nStep: 800/865, accuracy0.250, loss2.956, learning rate 0.0004721 \nStep: 864/865, accuracy0.290, loss2.535, learning rate 0.0004707 \nEpoch: 13/200, accuracy0.233, loss2.859, learning rate 0.000\nEstimated reamining runtime: 6:21:33.388616\n--Validation--\nValidation : Accuracy: 0.351, Loss: 2.409\n=== Epoch: 14 ===\nStep: 0/865, accuracy0.266, loss2.930, learning rate 0.0004707 \nStep: 100/865, accuracy0.281, loss2.794, learning rate 0.0004685 \nStep: 200/865, accuracy0.250, loss2.672, learning rate 0.0004663 \nStep: 300/865, accuracy0.172, loss2.954, learning rate 0.0004641 \nStep: 400/865, accuracy0.172, loss2.978, learning rate 0.0004620 \nStep: 500/865, accuracy0.172, loss2.946, learning rate 0.0004599 \nStep: 600/865, accuracy0.219, loss2.956, learning rate 0.0004578 \nStep: 700/865, accuracy0.234, loss2.808, learning rate 0.0004557 \nStep: 800/865, accuracy0.219, loss2.782, learning rate 0.0004536 \nStep: 864/865, accuracy0.194, loss2.781, learning rate 0.0004523 \nEpoch: 14/200, accuracy0.234, loss2.842, learning rate 0.000\nEstimated reamining runtime: 6:19:14.403063\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.349, Loss: 2.379\n=== Epoch: 15 ===\nStep: 0/865, accuracy0.172, loss3.036, learning rate 0.0004523 \nStep: 100/865, accuracy0.297, loss2.780, learning rate 0.0004502 \nStep: 200/865, accuracy0.219, loss3.122, learning rate 0.0004482 \nStep: 300/865, accuracy0.219, loss2.733, learning rate 0.0004462 \nStep: 400/865, accuracy0.203, loss2.674, learning rate 0.0004442 \nStep: 500/865, accuracy0.234, loss2.828, learning rate 0.0004423 \nStep: 600/865, accuracy0.219, loss2.834, learning rate 0.0004403 \nStep: 700/865, accuracy0.188, loss3.131, learning rate 0.0004384 \nStep: 800/865, accuracy0.297, loss2.665, learning rate 0.0004365 \nStep: 864/865, accuracy0.194, loss2.980, learning rate 0.0004353 \nEpoch: 15/200, accuracy0.237, loss2.834, learning rate 0.000\nEstimated reamining runtime: 6:16:50.830832\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.349, Loss: 2.379\n=== Epoch: 16 ===\nStep: 0/865, accuracy0.281, loss2.743, learning rate 0.0004353 \nStep: 100/865, accuracy0.281, loss2.946, learning rate 0.0004334 \nStep: 200/865, accuracy0.281, loss2.543, learning rate 0.0004315 \nStep: 300/865, accuracy0.234, loss2.686, learning rate 0.0004296 \nStep: 400/865, accuracy0.156, loss3.246, learning rate 0.0004278 \nStep: 500/865, accuracy0.250, loss3.050, learning rate 0.0004260 \nStep: 600/865, accuracy0.297, loss2.760, learning rate 0.0004242 \nStep: 700/865, accuracy0.359, loss2.665, learning rate 0.0004224 \nStep: 800/865, accuracy0.172, loss2.886, learning rate 0.0004206 \nStep: 864/865, accuracy0.226, loss2.947, learning rate 0.0004195 \nEpoch: 16/200, accuracy0.240, loss2.820, learning rate 0.000\nEstimated reamining runtime: 6:14:49.200604\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.353, Loss: 2.338\n=== Epoch: 17 ===\nStep: 0/865, accuracy0.281, loss2.558, learning rate 0.0004195 \nStep: 100/865, accuracy0.203, loss3.009, learning rate 0.0004177 \nStep: 200/865, accuracy0.234, loss2.770, learning rate 0.0004160 \nStep: 300/865, accuracy0.203, loss2.854, learning rate 0.0004143 \nStep: 400/865, accuracy0.250, loss2.544, learning rate 0.0004125 \nStep: 500/865, accuracy0.172, loss2.845, learning rate 0.0004108 \nStep: 600/865, accuracy0.281, loss2.903, learning rate 0.0004092 \nStep: 700/865, accuracy0.203, loss2.943, learning rate 0.0004075 \nStep: 800/865, accuracy0.156, loss2.858, learning rate 0.0004058 \nStep: 864/865, accuracy0.290, loss2.814, learning rate 0.0004048 \nEpoch: 17/200, accuracy0.238, loss2.813, learning rate 0.000\nEstimated reamining runtime: 6:13:06.749141\n--Validation--\nValidation : Accuracy: 0.346, Loss: 2.358\n=== Epoch: 18 ===\nStep: 0/865, accuracy0.219, loss2.963, learning rate 0.0004048 \nStep: 100/865, accuracy0.344, loss2.456, learning rate 0.0004031 \nStep: 200/865, accuracy0.266, loss2.901, learning rate 0.0004015 \nStep: 300/865, accuracy0.281, loss2.763, learning rate 0.0003999 \nStep: 400/865, accuracy0.188, loss3.076, learning rate 0.0003983 \nStep: 500/865, accuracy0.203, loss2.985, learning rate 0.0003967 \nStep: 600/865, accuracy0.219, loss2.824, learning rate 0.0003952 \nStep: 700/865, accuracy0.156, loss2.735, learning rate 0.0003936 \nStep: 800/865, accuracy0.203, loss2.797, learning rate 0.0003921 \nStep: 864/865, accuracy0.258, loss2.852, learning rate 0.0003911 \nEpoch: 18/200, accuracy0.248, loss2.798, learning rate 0.000\nEstimated reamining runtime: 6:11:35.581452\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.357, Loss: 2.331\n=== Epoch: 19 ===\nStep: 0/865, accuracy0.234, loss3.229, learning rate 0.0003911 \nStep: 100/865, accuracy0.250, loss2.702, learning rate 0.0003896 \nStep: 200/865, accuracy0.266, loss2.555, learning rate 0.0003880 \nStep: 300/865, accuracy0.281, loss2.575, learning rate 0.0003865 \nStep: 400/865, accuracy0.219, loss2.841, learning rate 0.0003851 \nStep: 500/865, accuracy0.328, loss2.576, learning rate 0.0003836 \nStep: 600/865, accuracy0.312, loss2.708, learning rate 0.0003821 \nStep: 700/865, accuracy0.359, loss2.668, learning rate 0.0003807 \nStep: 800/865, accuracy0.203, loss3.024, learning rate 0.0003792 \nStep: 864/865, accuracy0.387, loss2.338, learning rate 0.0003783 \nEpoch: 19/200, accuracy0.249, loss2.786, learning rate 0.000\nEstimated reamining runtime: 6:10:24.456120\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.378, Loss: 2.289\n=== Epoch: 20 ===\nStep: 0/865, accuracy0.391, loss2.574, learning rate 0.0003783 \nStep: 100/865, accuracy0.281, loss2.655, learning rate 0.0003769 \nStep: 200/865, accuracy0.234, loss2.835, learning rate 0.0003754 \nStep: 300/865, accuracy0.109, loss3.142, learning rate 0.0003740 \nStep: 400/865, accuracy0.266, loss2.620, learning rate 0.0003726 \nStep: 500/865, accuracy0.266, loss2.662, learning rate 0.0003713 \nStep: 600/865, accuracy0.203, loss3.212, learning rate 0.0003699 \nStep: 700/865, accuracy0.266, loss2.688, learning rate 0.0003685 \nStep: 800/865, accuracy0.188, loss2.803, learning rate 0.0003672 \nStep: 864/865, accuracy0.355, loss2.641, learning rate 0.0003663 \nEpoch: 20/200, accuracy0.251, loss2.780, learning rate 0.000\nEstimated reamining runtime: 6:09:19.974566\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.367, Loss: 2.284\n=== Epoch: 21 ===\nStep: 0/865, accuracy0.391, loss2.404, learning rate 0.0003663 \nStep: 100/865, accuracy0.141, loss2.971, learning rate 0.0003650 \nStep: 200/865, accuracy0.250, loss2.818, learning rate 0.0003636 \nStep: 300/865, accuracy0.250, loss2.863, learning rate 0.0003623 \nStep: 400/865, accuracy0.297, loss2.807, learning rate 0.0003610 \nStep: 500/865, accuracy0.172, loss2.683, learning rate 0.0003597 \nStep: 600/865, accuracy0.250, loss2.924, learning rate 0.0003584 \nStep: 700/865, accuracy0.312, loss2.659, learning rate 0.0003571 \nStep: 800/865, accuracy0.422, loss2.484, learning rate 0.0003559 \nStep: 864/865, accuracy0.129, loss2.956, learning rate 0.0003551 \nEpoch: 21/200, accuracy0.254, loss2.767, learning rate 0.000\nEstimated reamining runtime: 6:08:16.115532\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.379, Loss: 2.258\n=== Epoch: 22 ===\nStep: 0/865, accuracy0.172, loss2.745, learning rate 0.0003551 \nStep: 100/865, accuracy0.250, loss2.720, learning rate 0.0003538 \nStep: 200/865, accuracy0.156, loss2.921, learning rate 0.0003525 \nStep: 300/865, accuracy0.297, loss2.570, learning rate 0.0003513 \nStep: 400/865, accuracy0.266, loss2.716, learning rate 0.0003501 \nStep: 500/865, accuracy0.172, loss2.791, learning rate 0.0003489 \nStep: 600/865, accuracy0.266, loss2.601, learning rate 0.0003476 \nStep: 700/865, accuracy0.219, loss2.811, learning rate 0.0003464 \nStep: 800/865, accuracy0.281, loss2.585, learning rate 0.0003452 \nStep: 864/865, accuracy0.290, loss2.282, learning rate 0.0003445 \nEpoch: 22/200, accuracy0.251, loss2.760, learning rate 0.000\nEstimated reamining runtime: 6:07:16.225939\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.379, Loss: 2.247\n=== Epoch: 23 ===\nStep: 0/865, accuracy0.219, loss2.624, learning rate 0.0003445 \nStep: 100/865, accuracy0.281, loss2.535, learning rate 0.0003433 \nStep: 200/865, accuracy0.141, loss2.937, learning rate 0.0003421 \nStep: 300/865, accuracy0.203, loss2.920, learning rate 0.0003409 \nStep: 400/865, accuracy0.281, loss2.658, learning rate 0.0003398 \nStep: 500/865, accuracy0.281, loss2.883, learning rate 0.0003386 \nStep: 600/865, accuracy0.266, loss2.730, learning rate 0.0003375 \nStep: 700/865, accuracy0.281, loss2.832, learning rate 0.0003364 \nStep: 800/865, accuracy0.234, loss2.972, learning rate 0.0003352 \nStep: 864/865, accuracy0.226, loss3.155, learning rate 0.0003345 \nEpoch: 23/200, accuracy0.255, loss2.754, learning rate 0.000\nEstimated reamining runtime: 6:06:21.669822\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.374, Loss: 2.242\n=== Epoch: 24 ===\nStep: 0/865, accuracy0.312, loss2.515, learning rate 0.0003345 \nStep: 100/865, accuracy0.250, loss2.774, learning rate 0.0003334 \nStep: 200/865, accuracy0.359, loss2.503, learning rate 0.0003323 \nStep: 300/865, accuracy0.250, loss2.514, learning rate 0.0003312 \nStep: 400/865, accuracy0.188, loss2.959, learning rate 0.0003301 \nStep: 500/865, accuracy0.188, loss2.766, learning rate 0.0003290 \nStep: 600/865, accuracy0.281, loss2.681, learning rate 0.0003279 \nStep: 700/865, accuracy0.297, loss2.571, learning rate 0.0003269 \nStep: 800/865, accuracy0.250, loss2.624, learning rate 0.0003258 \nStep: 864/865, accuracy0.290, loss2.627, learning rate 0.0003251 \nEpoch: 24/200, accuracy0.255, loss2.747, learning rate 0.000\nEstimated reamining runtime: 6:05:37.103824\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.387, Loss: 2.238\n=== Epoch: 25 ===\nStep: 0/865, accuracy0.250, loss2.714, learning rate 0.0003251 \nStep: 100/865, accuracy0.281, loss2.594, learning rate 0.0003240 \nStep: 200/865, accuracy0.234, loss2.804, learning rate 0.0003230 \nStep: 300/865, accuracy0.312, loss2.562, learning rate 0.0003220 \nStep: 400/865, accuracy0.188, loss2.884, learning rate 0.0003209 \nStep: 500/865, accuracy0.219, loss2.834, learning rate 0.0003199 \nStep: 600/865, accuracy0.234, loss2.622, learning rate 0.0003189 \nStep: 700/865, accuracy0.312, loss2.676, learning rate 0.0003179 \nStep: 800/865, accuracy0.250, loss2.672, learning rate 0.0003169 \nStep: 864/865, accuracy0.194, loss2.987, learning rate 0.0003162 \nEpoch: 25/200, accuracy0.258, loss2.738, learning rate 0.000\nEstimated reamining runtime: 6:04:50.881109\n--Validation--\nValidation : Accuracy: 0.381, Loss: 2.257\n=== Epoch: 26 ===\nStep: 0/865, accuracy0.172, loss2.964, learning rate 0.0003162 \nStep: 100/865, accuracy0.234, loss2.671, learning rate 0.0003152 \nStep: 200/865, accuracy0.281, loss2.673, learning rate 0.0003142 \nStep: 300/865, accuracy0.344, loss2.651, learning rate 0.0003132 \nStep: 400/865, accuracy0.266, loss2.676, learning rate 0.0003123 \nStep: 500/865, accuracy0.234, loss2.875, learning rate 0.0003113 \nStep: 600/865, accuracy0.312, loss2.790, learning rate 0.0003103 \nStep: 700/865, accuracy0.266, loss2.629, learning rate 0.0003094 \nStep: 800/865, accuracy0.281, loss2.583, learning rate 0.0003084 \nStep: 864/865, accuracy0.355, loss2.353, learning rate 0.0003078 \nEpoch: 26/200, accuracy0.260, loss2.736, learning rate 0.000\nEstimated reamining runtime: 6:04:03.653066\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.392, Loss: 2.202\n=== Epoch: 27 ===\nStep: 0/865, accuracy0.344, loss2.641, learning rate 0.0003078 \nStep: 100/865, accuracy0.219, loss3.019, learning rate 0.0003068 \nStep: 200/865, accuracy0.219, loss2.850, learning rate 0.0003059 \nStep: 300/865, accuracy0.234, loss2.672, learning rate 0.0003050 \nStep: 400/865, accuracy0.281, loss2.696, learning rate 0.0003040 \nStep: 500/865, accuracy0.172, loss2.757, learning rate 0.0003031 \nStep: 600/865, accuracy0.188, loss2.866, learning rate 0.0003022 \nStep: 700/865, accuracy0.391, loss2.671, learning rate 0.0003013 \nStep: 800/865, accuracy0.219, loss2.794, learning rate 0.0003004 \nStep: 864/865, accuracy0.258, loss2.692, learning rate 0.0002998 \nEpoch: 27/200, accuracy0.264, loss2.723, learning rate 0.000\nEstimated reamining runtime: 6:03:31.381649\n--Validation--\nValidation : Accuracy: 0.390, Loss: 2.212\n=== Epoch: 28 ===\nStep: 0/865, accuracy0.203, loss2.646, learning rate 0.0002998 \nStep: 100/865, accuracy0.203, loss2.719, learning rate 0.0002989 \nStep: 200/865, accuracy0.234, loss2.999, learning rate 0.0002980 \nStep: 300/865, accuracy0.156, loss3.074, learning rate 0.0002971 \nStep: 400/865, accuracy0.234, loss2.730, learning rate 0.0002963 \nStep: 500/865, accuracy0.203, loss2.713, learning rate 0.0002954 \nStep: 600/865, accuracy0.234, loss2.600, learning rate 0.0002945 \nStep: 700/865, accuracy0.328, loss2.330, learning rate 0.0002936 \nStep: 800/865, accuracy0.344, loss2.585, learning rate 0.0002928 \nStep: 864/865, accuracy0.387, loss2.554, learning rate 0.0002922 \nEpoch: 28/200, accuracy0.263, loss2.727, learning rate 0.000\nEstimated reamining runtime: 6:03:09.896279\n--Validation--\nValidation : Accuracy: 0.390, Loss: 2.227\n=== Epoch: 29 ===\nStep: 0/865, accuracy0.250, loss2.793, 
learning rate 0.0002922 \nStep: 100/865, accuracy0.250, loss2.815, learning rate 0.0002914 \nStep: 200/865, accuracy0.328, loss2.599, learning rate 0.0002905 \nStep: 300/865, accuracy0.234, loss2.765, learning rate 0.0002897 \nStep: 400/865, accuracy0.266, loss2.694, learning rate 0.0002889 \nStep: 500/865, accuracy0.219, loss2.685, learning rate 0.0002880 \nStep: 600/865, accuracy0.266, loss2.735, learning rate 0.0002872 \nStep: 700/865, accuracy0.281, loss2.691, learning rate 0.0002864 \nStep: 800/865, accuracy0.406, loss2.531, learning rate 0.0002856 \nStep: 864/865, accuracy0.290, loss2.389, learning rate 0.0002850 \nEpoch: 29/200, accuracy0.265, loss2.715, learning rate 0.000\nEstimated reamining runtime: 6:02:46.663313\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.392, Loss: 2.199\n=== Epoch: 30 ===\nStep: 0/865, accuracy0.203, loss2.736, learning rate 0.0002850 \nStep: 100/865, accuracy0.312, loss2.543, learning rate 0.0002842 \nStep: 200/865, accuracy0.344, loss2.582, learning rate 0.0002834 \nStep: 300/865, accuracy0.266, loss2.623, learning rate 0.0002826 \nStep: 400/865, accuracy0.297, loss2.611, learning rate 0.0002818 \nStep: 500/865, accuracy0.234, loss2.764, learning rate 0.0002810 \nStep: 600/865, accuracy0.312, loss2.637, learning rate 0.0002802 \nStep: 700/865, accuracy0.266, loss2.648, learning rate 0.0002794 \nStep: 800/865, accuracy0.172, loss2.949, learning rate 0.0002787 \nStep: 864/865, accuracy0.194, loss2.940, learning rate 0.0002782 \nEpoch: 30/200, accuracy0.267, loss2.698, learning rate 0.000\nEstimated reamining runtime: 6:02:21.847583\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.395, Loss: 2.182\n=== Epoch: 31 ===\nStep: 0/865, accuracy0.250, loss2.672, learning rate 0.0002782 \nStep: 100/865, accuracy0.250, loss2.741, learning rate 0.0002774 \nStep: 200/865, accuracy0.250, loss2.667, learning rate 0.0002766 \nStep: 300/865, accuracy0.266, loss2.774, learning rate 0.0002759 \nStep: 400/865, accuracy0.344, loss2.537, learning rate 0.0002751 \nStep: 500/865, accuracy0.297, loss2.478, learning rate 0.0002743 \nStep: 600/865, accuracy0.234, loss3.007, learning rate 0.0002736 \nStep: 700/865, accuracy0.297, loss2.862, learning rate 0.0002729 \nStep: 800/865, accuracy0.312, loss2.543, learning rate 0.0002721 \nStep: 864/865, accuracy0.290, loss2.687, learning rate 0.0002716 \nEpoch: 31/200, accuracy0.266, loss2.701, learning rate 0.000\nEstimated reamining runtime: 6:02:38.014750\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.387, Loss: 2.180\n=== Epoch: 32 ===\nStep: 0/865, accuracy0.266, loss2.816, learning rate 0.0002716 \nStep: 100/865, accuracy0.172, loss2.942, learning rate 0.0002709 \nStep: 200/865, accuracy0.312, loss2.654, learning rate 0.0002702 \nStep: 300/865, accuracy0.297, loss2.599, learning rate 0.0002694 \nStep: 400/865, accuracy0.312, loss2.826, learning rate 0.0002687 \nStep: 500/865, accuracy0.203, loss2.897, learning rate 0.0002680 \nStep: 600/865, accuracy0.250, loss2.831, learning rate 0.0002673 \nStep: 700/865, accuracy0.328, loss2.757, learning rate 0.0002666 \nStep: 800/865, accuracy0.281, loss2.682, learning rate 0.0002659 \nStep: 864/865, accuracy0.387, loss2.553, learning rate 0.0002654 \nEpoch: 32/200, accuracy0.270, loss2.690, learning rate 0.000\nEstimated reamining runtime: 6:03:25.622484\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.400, Loss: 2.166\n=== Epoch: 33 ===\nStep: 0/865, accuracy0.266, loss2.623, learning rate 0.0002654 \nStep: 100/865, accuracy0.219, loss2.675, learning rate 0.0002647 \nStep: 200/865, accuracy0.250, loss2.823, learning rate 0.0002640 \nStep: 300/865, accuracy0.203, loss2.814, learning rate 0.0002633 \nStep: 400/865, accuracy0.312, loss2.554, learning rate 0.0002626 \nStep: 500/865, accuracy0.281, loss2.572, learning rate 0.0002619 \nStep: 600/865, accuracy0.281, loss2.765, learning rate 0.0002612 \nStep: 700/865, accuracy0.172, loss2.943, learning rate 0.0002606 \nStep: 800/865, accuracy0.266, loss2.526, learning rate 0.0002599 \nStep: 864/865, accuracy0.290, loss2.775, learning rate 0.0002594 \nEpoch: 33/200, accuracy0.270, loss2.681, learning rate 0.000\nEstimated reamining runtime: 6:03:49.427736\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.403, Loss: 2.154\n=== Epoch: 34 ===\nStep: 0/865, accuracy0.266, loss2.820, learning rate 0.0002594 \nStep: 100/865, accuracy0.359, loss2.352, learning rate 0.0002588 \nStep: 200/865, accuracy0.250, loss2.780, learning rate 0.0002581 \nStep: 300/865, accuracy0.328, loss2.501, learning rate 0.0002574 \nStep: 400/865, accuracy0.312, loss2.722, learning rate 0.0002568 \nStep: 500/865, accuracy0.281, loss2.449, learning rate 0.0002561 \nStep: 600/865, accuracy0.328, loss2.548, learning rate 0.0002555 \nStep: 700/865, accuracy0.391, loss2.392, learning rate 0.0002548 \nStep: 800/865, accuracy0.266, loss2.549, learning rate 0.0002542 \nStep: 864/865, accuracy0.290, loss2.339, learning rate 0.0002537 \nEpoch: 34/200, accuracy0.272, loss2.685, learning rate 0.000\nEstimated reamining runtime: 6:04:12.522072\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.414, Loss: 2.143\n=== Epoch: 35 ===\nStep: 0/865, accuracy0.297, loss2.702, learning rate 0.0002537 \nStep: 100/865, accuracy0.297, loss2.508, learning rate 0.0002531 \nStep: 200/865, accuracy0.234, loss2.507, learning rate 0.0002525 \nStep: 300/865, accuracy0.297, loss2.596, learning rate 0.0002518 \nStep: 400/865, accuracy0.344, loss2.479, learning rate 0.0002512 \nStep: 500/865, accuracy0.359, loss2.384, learning rate 0.0002506 \nStep: 600/865, accuracy0.141, loss2.890, learning rate 0.0002499 \nStep: 700/865, accuracy0.328, loss2.735, learning rate 0.0002493 \nStep: 800/865, accuracy0.281, loss2.686, learning rate 0.0002487 \nStep: 864/865, accuracy0.290, loss2.617, learning rate 0.0002483 \nEpoch: 35/200, accuracy0.273, loss2.673, learning rate 0.000\nEstimated reamining runtime: 6:04:09.981699\n--Validation--\nValidation : Accuracy: 0.411, Loss: 2.146\n=== Epoch: 36 ===\nStep: 0/865, accuracy0.156, loss2.896, learning rate 0.0002483 \nStep: 100/865, accuracy0.297, loss2.537, learning rate 0.0002477 \nStep: 200/865, accuracy0.297, loss2.746, learning rate 0.0002471 \nStep: 300/865, accuracy0.344, loss2.409, learning rate 0.0002465 \nStep: 400/865, accuracy0.281, loss2.451, learning rate 0.0002459 \nStep: 500/865, accuracy0.172, loss2.827, learning rate 0.0002452 \nStep: 600/865, accuracy0.219, loss2.715, learning rate 0.0002446 \nStep: 700/865, accuracy0.266, loss2.597, learning rate 0.0002441 \nStep: 800/865, accuracy0.234, loss2.874, learning rate 0.0002435 \nStep: 864/865, accuracy0.226, loss3.036, learning rate 0.0002431 \nEpoch: 36/200, accuracy0.275, loss2.670, learning rate 0.000\nEstimated reamining runtime: 6:03:59.587172\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.409, Loss: 2.129\n=== Epoch: 37 ===\nStep: 0/865, accuracy0.250, loss2.595, learning rate 0.0002431 \nStep: 100/865, accuracy0.250, loss2.561, learning rate 0.0002425 \nStep: 200/865, accuracy0.172, loss2.716, learning rate 0.0002419 \nStep: 300/865, accuracy0.266, loss2.956, learning rate 0.0002413 \nStep: 400/865, accuracy0.203, loss2.959, learning rate 0.0002407 \nStep: 500/865, accuracy0.297, loss2.623, learning rate 0.0002402 \nStep: 600/865, accuracy0.188, loss2.724, learning rate 0.0002396 \nStep: 700/865, accuracy0.359, loss2.361, learning rate 0.0002390 \nStep: 800/865, accuracy0.328, loss2.487, learning rate 0.0002384 \nStep: 864/865, accuracy0.290, loss2.708, learning rate 0.0002381 \nEpoch: 37/200, accuracy0.274, loss2.667, learning rate 0.000\nEstimated reamining runtime: 6:04:03.981341\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.409, Loss: 2.127\n=== Epoch: 38 ===\nStep: 0/865, accuracy0.344, loss2.518, learning rate 0.0002381 \nStep: 100/865, accuracy0.391, loss2.389, learning rate 0.0002375 \nStep: 200/865, accuracy0.266, loss2.812, learning rate 0.0002369 \nStep: 300/865, accuracy0.312, loss2.371, learning rate 0.0002364 \nStep: 400/865, accuracy0.344, loss2.325, learning rate 0.0002358 \nStep: 500/865, accuracy0.219, loss2.830, learning rate 0.0002353 \nStep: 600/865, accuracy0.344, loss2.652, learning rate 0.0002347 \nStep: 700/865, accuracy0.188, loss2.796, learning rate 0.0002342 \nStep: 800/865, accuracy0.328, loss2.487, learning rate 0.0002336 \nStep: 864/865, accuracy0.226, loss2.580, learning rate 0.0002333 \nEpoch: 38/200, accuracy0.278, loss2.668, learning rate 0.000\nEstimated reamining runtime: 6:04:12.852693\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.417, Loss: 2.121\n=== Epoch: 39 ===\nStep: 0/865, accuracy0.234, loss2.726, learning rate 0.0002333 \nStep: 100/865, accuracy0.266, loss2.645, learning rate 0.0002327 \nStep: 200/865, accuracy0.281, loss2.620, learning rate 0.0002322 \nStep: 300/865, accuracy0.281, loss2.600, learning rate 0.0002316 \nStep: 400/865, accuracy0.344, loss2.540, learning rate 0.0002311 \nStep: 500/865, accuracy0.312, loss3.023, learning rate 0.0002306 \nStep: 600/865, accuracy0.281, loss2.687, learning rate 0.0002300 \nStep: 700/865, accuracy0.438, loss2.139, learning rate 0.0002295 \nStep: 800/865, accuracy0.266, loss2.776, learning rate 0.0002290 \nStep: 864/865, accuracy0.419, loss2.068, learning rate 0.0002287 \nEpoch: 39/200, accuracy0.279, loss2.653, learning rate 0.000\nEstimated reamining runtime: 6:04:01.133203\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.405, Loss: 2.116\n=== Epoch: 40 ===\nStep: 0/865, accuracy0.203, loss2.849, learning rate 0.0002286 \nStep: 100/865, accuracy0.250, loss2.501, learning rate 0.0002281 \nStep: 200/865, accuracy0.297, loss2.568, learning rate 0.0002276 \nStep: 300/865, accuracy0.203, loss2.981, learning rate 0.0002271 \nStep: 400/865, accuracy0.234, loss2.646, learning rate 0.0002266 \nStep: 500/865, accuracy0.344, loss2.328, learning rate 0.0002261 \nStep: 600/865, accuracy0.266, loss2.796, learning rate 0.0002256 \nStep: 700/865, accuracy0.375, loss2.562, learning rate 0.0002250 \nStep: 800/865, accuracy0.250, loss2.832, learning rate 0.0002245 \nStep: 864/865, accuracy0.355, loss2.670, learning rate 0.0002242 \nEpoch: 40/200, accuracy0.277, loss2.652, learning rate 0.000\nEstimated reamining runtime: 6:03:45.102779\n--Validation--\nValidation : Accuracy: 0.406, Loss: 2.138\n=== Epoch: 41 ===\nStep: 0/865, accuracy0.312, loss2.578, learning rate 0.0002242 \nStep: 100/865, accuracy0.344, loss2.357, learning rate 0.0002237 \nStep: 200/865, accuracy0.234, loss2.681, learning rate 0.0002232 \nStep: 300/865, accuracy0.203, loss2.915, learning rate 0.0002227 \nStep: 400/865, accuracy0.328, loss2.608, learning rate 0.0002222 \nStep: 500/865, accuracy0.234, loss2.872, learning rate 0.0002217 \nStep: 600/865, accuracy0.328, loss2.652, learning rate 0.0002212 \nStep: 700/865, accuracy0.234, loss2.789, learning rate 0.0002208 \nStep: 800/865, accuracy0.281, loss2.771, learning rate 0.0002203 \nStep: 864/865, accuracy0.161, loss3.249, learning rate 0.0002200 \nEpoch: 41/200, accuracy0.279, loss2.653, learning rate 0.000\nEstimated reamining runtime: 6:03:44.308367\n--Validation--\nValidation : Accuracy: 0.408, Loss: 2.128\n=== Epoch: 42 ===\nStep: 0/865, accuracy0.281, loss2.791, learning rate 0.0002199 \nStep: 100/865, accuracy0.234, loss2.687, learning rate 0.0002195 \nStep: 200/865, accuracy0.234, loss2.764, learning rate 0.0002190 \nStep: 300/865, accuracy0.375, loss2.614, learning rate 0.0002185 \nStep: 400/865, accuracy0.203, loss2.727, learning rate 0.0002180 \nStep: 500/865, accuracy0.312, loss2.644, learning rate 0.0002176 \nStep: 600/865, accuracy0.281, loss2.667, learning rate 0.0002171 \nStep: 700/865, accuracy0.297, loss2.684, learning rate 0.0002166 \nStep: 800/865, accuracy0.281, loss2.780, learning rate 0.0002161 \nStep: 864/865, accuracy0.194, loss2.717, learning rate 0.0002158 \nEpoch: 42/200, accuracy0.280, loss2.645, learning rate 0.000\nEstimated reamining runtime: 6:03:34.699280\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.426, Loss: 2.090\n=== Epoch: 43 ===\nStep: 0/865, accuracy0.328, loss2.580, learning rate 0.0002158 \nStep: 100/865, accuracy0.203, loss2.812, learning rate 0.0002154 \nStep: 200/865, accuracy0.297, loss2.654, learning rate 0.0002149 \nStep: 300/865, accuracy0.297, loss2.411, learning rate 0.0002145 \nStep: 400/865, accuracy0.188, loss2.761, learning rate 0.0002140 \nStep: 500/865, accuracy0.297, loss2.575, learning rate 0.0002135 \nStep: 600/865, accuracy0.250, loss2.502, learning rate 0.0002131 \nStep: 700/865, accuracy0.359, loss2.638, learning rate 0.0002126 \nStep: 800/865, accuracy0.266, loss2.693, learning rate 0.0002122 \nStep: 864/865, accuracy0.355, loss2.432, learning rate 0.0002119 \nEpoch: 43/200, accuracy0.281, loss2.643, learning rate 0.000\nEstimated reamining runtime: 6:03:36.985565\n--Validation--\nValidation : Accuracy: 0.407, Loss: 2.101\n=== Epoch: 44 ===\nStep: 0/865, accuracy0.125, loss3.032, learning rate 0.0002119 \nStep: 100/865, accuracy0.312, loss2.539, learning rate 0.0002114 \nStep: 200/865, accuracy0.375, loss2.535, learning rate 0.0002110 \nStep: 300/865, accuracy0.266, loss2.509, learning rate 0.0002105 \nStep: 400/865, accuracy0.328, loss2.500, learning rate 0.0002101 \nStep: 500/865, accuracy0.172, loss2.808, learning rate 0.0002097 \nStep: 600/865, accuracy0.281, loss2.526, learning rate 0.0002092 \nStep: 700/865, accuracy0.266, loss2.743, learning rate 0.0002088 \nStep: 800/865, accuracy0.281, loss2.741, learning rate 0.0002084 \nStep: 864/865, accuracy0.258, loss2.632, learning rate 0.0002081 \nEpoch: 44/200, accuracy0.282, loss2.636, learning rate 0.000\nEstimated reamining runtime: 6:03:33.610063\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.422, Loss: 2.089\n=== Epoch: 45 ===\nStep: 0/865, accuracy0.328, loss2.590, learning rate 0.0002081 \nStep: 100/865, accuracy0.328, loss2.563, learning rate 0.0002076 \nStep: 200/865, accuracy0.281, loss2.901, learning rate 0.0002072 \nStep: 300/865, accuracy0.328, loss2.444, learning rate 0.0002068 \nStep: 400/865, accuracy0.281, loss2.807, learning rate 0.0002064 \nStep: 500/865, accuracy0.203, loss2.643, learning rate 0.0002059 \nStep: 600/865, accuracy0.312, loss2.338, learning rate 0.0002055 \nStep: 700/865, accuracy0.391, loss2.578, learning rate 0.0002051 \nStep: 800/865, accuracy0.375, loss2.486, learning rate 0.0002047 \nStep: 864/865, accuracy0.452, loss2.190, learning rate 0.0002044 \nEpoch: 45/200, accuracy0.284, loss2.626, learning rate 0.000\nEstimated reamining runtime: 6:03:21.455176\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.418, Loss: 2.086\n=== Epoch: 46 ===\nStep: 0/865, accuracy0.266, loss2.423, learning rate 0.0002044 \nStep: 100/865, accuracy0.281, loss2.762, learning rate 0.0002040 \nStep: 200/865, accuracy0.281, loss2.691, learning rate 0.0002036 \nStep: 300/865, accuracy0.312, loss2.384, learning rate 0.0002031 \nStep: 400/865, accuracy0.344, loss2.753, learning rate 0.0002027 \nStep: 500/865, accuracy0.281, loss2.983, learning rate 0.0002023 \nStep: 600/865, accuracy0.203, loss2.674, learning rate 0.0002019 \nStep: 700/865, accuracy0.266, loss2.774, learning rate 0.0002015 \nStep: 800/865, accuracy0.344, loss2.462, learning rate 0.0002011 \nStep: 864/865, accuracy0.194, loss3.068, learning rate 0.0002008 \nEpoch: 46/200, accuracy0.285, loss2.629, learning rate 0.000\nEstimated reamining runtime: 6:03:04.154749\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.417, Loss: 2.075\n=== Epoch: 47 ===\nStep: 0/865, accuracy0.266, loss2.854, learning rate 0.0002008 \nStep: 100/865, accuracy0.344, loss2.162, learning rate 0.0002004 \nStep: 200/865, accuracy0.219, loss2.786, learning rate 0.0002000 \nStep: 300/865, accuracy0.250, loss2.955, learning rate 0.0001996 \nStep: 400/865, accuracy0.312, loss2.428, learning rate 0.0001992 \nStep: 500/865, accuracy0.250, loss2.500, learning rate 0.0001988 \nStep: 600/865, accuracy0.234, loss2.796, learning rate 0.0001985 \nStep: 700/865, accuracy0.312, loss2.583, learning rate 0.0001981 \nStep: 800/865, accuracy0.328, loss2.441, learning rate 0.0001977 \nStep: 864/865, accuracy0.387, loss2.292, learning rate 0.0001974 \nEpoch: 47/200, accuracy0.284, loss2.622, learning rate 0.000\nEstimated reamining runtime: 6:02:51.427015\n--Validation--\nValidation : Accuracy: 0.420, Loss: 2.079\n=== Epoch: 48 ===\nStep: 0/865, accuracy0.250, loss2.831, learning rate 0.0001974 \nStep: 100/865, accuracy0.344, loss2.446, learning rate 0.0001970 \nStep: 200/865, accuracy0.156, loss3.113, learning rate 0.0001966 \nStep: 300/865, accuracy0.312, loss2.589, learning rate 0.0001963 \nStep: 400/865, accuracy0.281, loss2.450, learning rate 0.0001959 \nStep: 500/865, accuracy0.344, loss2.455, learning rate 0.0001955 \nStep: 600/865, accuracy0.281, loss2.718, learning rate 0.0001951 \nStep: 700/865, accuracy0.297, loss2.542, learning rate 0.0001947 \nStep: 800/865, accuracy0.219, loss2.664, learning rate 0.0001943 \nStep: 864/865, accuracy0.194, loss2.665, learning rate 0.0001941 \nEpoch: 48/200, accuracy0.287, loss2.613, learning rate 0.000\nEstimated reamining runtime: 6:02:37.197585\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.420, Loss: 2.072\n=== Epoch: 49 ===\nStep: 0/865, accuracy0.234, loss2.481, learning rate 0.0001941 \nStep: 100/865, accuracy0.234, loss2.802, learning rate 0.0001937 \nStep: 200/865, accuracy0.328, loss2.721, learning rate 0.0001933 \nStep: 300/865, accuracy0.234, loss2.589, learning rate 0.0001930 \nStep: 400/865, accuracy0.359, loss2.524, learning rate 0.0001926 \nStep: 500/865, accuracy0.219, loss2.622, learning rate 0.0001922 \nStep: 600/865, accuracy0.344, loss2.509, learning rate 0.0001919 \nStep: 700/865, accuracy0.359, loss2.271, learning rate 0.0001915 \nStep: 800/865, accuracy0.188, loss2.784, learning rate 0.0001911 \nStep: 864/865, accuracy0.355, loss2.733, learning rate 0.0001909 \nEpoch: 49/200, accuracy0.287, loss2.613, learning rate 0.000\nEstimated reamining runtime: 6:02:12.751315\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.425, Loss: 2.066\n=== Epoch: 50 ===\nStep: 0/865, accuracy0.281, loss2.694, learning rate 0.0001909 \nStep: 100/865, accuracy0.344, loss2.508, learning rate 0.0001905 \nStep: 200/865, accuracy0.297, loss2.742, learning rate 0.0001902 \nStep: 300/865, accuracy0.188, loss2.814, learning rate 0.0001898 \nStep: 400/865, accuracy0.312, loss2.336, learning rate 0.0001894 \nStep: 500/865, accuracy0.250, loss2.592, learning rate 0.0001891 \nStep: 600/865, accuracy0.297, loss2.369, learning rate 0.0001887 \nStep: 700/865, accuracy0.297, loss2.641, learning rate 0.0001884 \nStep: 800/865, accuracy0.281, loss2.692, learning rate 0.0001880 \nStep: 864/865, accuracy0.290, loss2.493, learning rate 0.0001878 \nEpoch: 50/200, accuracy0.287, loss2.609, learning rate 0.000\nEstimated reamining runtime: 6:01:48.289553\n--Validation--\nValidation : Accuracy: 0.418, Loss: 2.070\n=== Epoch: 51 ===\nStep: 0/865, accuracy0.281, loss2.626, learning rate 0.0001878 \nStep: 100/865, accuracy0.312, loss2.442, learning rate 0.0001874 \nStep: 200/865, accuracy0.312, loss2.500, learning rate 0.0001871 \nStep: 300/865, accuracy0.297, loss2.603, learning rate 0.0001867 \nStep: 400/865, accuracy0.234, loss2.764, learning rate 0.0001864 \nStep: 500/865, accuracy0.297, loss2.365, learning rate 0.0001860 \nStep: 600/865, accuracy0.281, loss2.532, learning rate 0.0001857 \nStep: 700/865, accuracy0.234, loss2.516, learning rate 0.0001854 \nStep: 800/865, accuracy0.250, loss2.562, learning rate 0.0001850 \nStep: 864/865, accuracy0.290, loss2.436, learning rate 0.0001848 \nEpoch: 51/200, accuracy0.288, loss2.604, learning rate 0.000\nEstimated reamining runtime: 6:01:23.669795\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.422, Loss: 2.061\n=== Epoch: 52 ===\nStep: 0/865, accuracy0.359, loss2.428, learning rate 0.0001848 \nStep: 100/865, accuracy0.297, loss2.619, learning rate 0.0001845 \nStep: 200/865, accuracy0.266, loss2.637, learning rate 0.0001841 \nStep: 300/865, accuracy0.328, loss2.561, learning rate 0.0001838 \nStep: 400/865, accuracy0.328, loss2.490, learning rate 0.0001834 \nStep: 500/865, accuracy0.266, loss2.826, learning rate 0.0001831 \nStep: 600/865, accuracy0.281, loss2.536, learning rate 0.0001828 \nStep: 700/865, accuracy0.359, loss2.599, learning rate 0.0001824 \nStep: 800/865, accuracy0.266, loss2.393, learning rate 0.0001821 \nStep: 864/865, accuracy0.290, loss2.768, learning rate 0.0001819 \nEpoch: 52/200, accuracy0.289, loss2.606, learning rate 0.000\nEstimated reamining runtime: 6:00:59.013687\n--Validation--\nValidation : Accuracy: 0.422, Loss: 2.062\n=== Epoch: 53 ===\nStep: 0/865, accuracy0.391, loss2.325, learning rate 0.0001819 \nStep: 100/865, accuracy0.203, loss2.694, learning rate 0.0001816 \nStep: 200/865, accuracy0.234, loss2.750, learning rate 0.0001812 \nStep: 300/865, accuracy0.344, loss2.422, learning rate 0.0001809 \nStep: 400/865, accuracy0.234, loss2.581, learning rate 0.0001806 \nStep: 500/865, accuracy0.312, loss2.544, learning rate 0.0001802 \nStep: 600/865, accuracy0.281, loss2.527, learning rate 0.0001799 \nStep: 700/865, accuracy0.266, loss2.648, learning rate 0.0001796 \nStep: 800/865, accuracy0.266, loss2.527, learning rate 0.0001793 \nStep: 864/865, accuracy0.290, loss2.620, learning rate 0.0001791 \nEpoch: 53/200, accuracy0.289, loss2.605, learning rate 0.000\nEstimated reamining runtime: 6:00:45.511584\n--Validation--\nNew best model ... 
saving
Validation : Accuracy: 0.430, Loss: 2.045
=== Epochs 54-135: training progress ===
Training accuracy rose from 0.290 (epoch 54) to 0.327 (epoch 135) and training loss fell from 2.598 to 2.446, while the learning rate decayed from 0.0001791 to 0.0000789.
Validation accuracy / loss at every tenth epoch: 60: 0.429 / 2.026, 70: 0.445 / 1.970, 80: 0.441 / 1.961, 90: 0.455 / 1.923, 100: 0.461 / 1.902, 110: 0.468 / 1.874, 120: 0.466 / 1.886, 130: 0.472 / 1.860.
New best model checkpoints were saved at epochs 55-58, 61-63, 66, 70, 72, 78, 81-82, 85, 88, 92, 94, 96-100, 102, 104, 108-110, 113, 119, 122, 125, 127-128, 131-132 and 134; the best of these, epoch 134, reached validation accuracy 0.476 and loss 1.852, and epoch 135 ended at 0.474 / 1.860.
=== Epoch: 136 ===
Epoch: 136/200, accuracy: 0.326, loss: 2.443, learning rate 0.0000783
Estimated remaining runtime: 5:53:15.160296
--Validation--
New best model ... 
saving\nValidation : Accuracy: 0.472, Loss: 1.849\n=== Epoch: 137 ===\nStep: 0/865, accuracy0.219, loss2.669, learning rate 0.0000783 \nStep: 100/865, accuracy0.359, loss2.396, learning rate 0.0000783 \nStep: 200/865, accuracy0.312, loss2.509, learning rate 0.0000782 \nStep: 300/865, accuracy0.406, loss2.185, learning rate 0.0000782 \nStep: 400/865, accuracy0.281, loss2.636, learning rate 0.0000781 \nStep: 500/865, accuracy0.375, loss2.335, learning rate 0.0000780 \nStep: 600/865, accuracy0.328, loss2.385, learning rate 0.0000780 \nStep: 700/865, accuracy0.484, loss2.037, learning rate 0.0000779 \nStep: 800/865, accuracy0.453, loss2.088, learning rate 0.0000779 \nStep: 864/865, accuracy0.419, loss2.593, learning rate 0.0000778 \nEpoch: 137/200, accuracy0.329, loss2.445, learning rate 0.000\nEstimated reamining runtime: 5:53:13.002334\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.474, Loss: 1.845\n=== Epoch: 138 ===\nStep: 0/865, accuracy0.344, loss2.237, learning rate 0.0000778 \nStep: 100/865, accuracy0.281, loss2.561, learning rate 0.0000778 \nStep: 200/865, accuracy0.453, loss2.281, learning rate 0.0000777 \nStep: 300/865, accuracy0.312, loss2.486, learning rate 0.0000776 \nStep: 400/865, accuracy0.359, loss2.514, learning rate 0.0000776 \nStep: 500/865, accuracy0.344, loss2.493, learning rate 0.0000775 \nStep: 600/865, accuracy0.359, loss2.337, learning rate 0.0000775 \nStep: 700/865, accuracy0.203, loss2.867, learning rate 0.0000774 \nStep: 800/865, accuracy0.391, loss2.296, learning rate 0.0000773 \nStep: 864/865, accuracy0.290, loss2.551, learning rate 0.0000773 \nEpoch: 138/200, accuracy0.330, loss2.442, learning rate 0.000\nEstimated reamining runtime: 5:53:11.075135\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.476, Loss: 1.840\n=== Epoch: 139 ===\nStep: 0/865, accuracy0.312, loss2.385, learning rate 0.0000773 \nStep: 100/865, accuracy0.203, loss2.661, learning rate 0.0000772 \nStep: 200/865, accuracy0.328, loss2.492, learning rate 0.0000772 \nStep: 300/865, accuracy0.312, loss2.428, learning rate 0.0000771 \nStep: 400/865, accuracy0.312, loss2.395, learning rate 0.0000771 \nStep: 500/865, accuracy0.344, loss2.419, learning rate 0.0000770 \nStep: 600/865, accuracy0.344, loss2.517, learning rate 0.0000769 \nStep: 700/865, accuracy0.344, loss2.289, learning rate 0.0000769 \nStep: 800/865, accuracy0.359, loss2.133, learning rate 0.0000768 \nStep: 864/865, accuracy0.355, loss2.580, learning rate 0.0000768 \nEpoch: 139/200, accuracy0.328, loss2.435, learning rate 0.000\nEstimated reamining runtime: 5:53:08.825502\n--Validation--\nValidation : Accuracy: 0.472, Loss: 1.841\n=== Epoch: 140 ===\nStep: 0/865, accuracy0.391, loss2.120, learning rate 0.0000768 \nStep: 100/865, accuracy0.234, loss2.769, learning rate 0.0000767 \nStep: 200/865, accuracy0.344, loss2.359, learning rate 0.0000767 \nStep: 300/865, accuracy0.328, loss2.477, learning rate 0.0000766 \nStep: 400/865, accuracy0.391, loss2.077, learning rate 0.0000765 \nStep: 500/865, accuracy0.406, loss2.463, learning rate 0.0000765 \nStep: 600/865, accuracy0.266, loss2.692, learning rate 0.0000764 \nStep: 700/865, accuracy0.391, loss1.989, learning rate 0.0000764 \nStep: 800/865, accuracy0.375, loss2.275, learning rate 0.0000763 \nStep: 864/865, accuracy0.355, loss2.466, learning rate 0.0000763 \nEpoch: 140/200, accuracy0.327, loss2.447, learning rate 0.000\nEstimated reamining runtime: 5:53:07.625334\n--Validation--\nValidation : Accuracy: 0.474, Loss: 1.841\n=== Epoch: 141 ===\nStep: 
0/865, accuracy0.312, loss2.599, learning rate 0.0000763 \nStep: 100/865, accuracy0.234, loss2.565, learning rate 0.0000762 \nStep: 200/865, accuracy0.344, loss2.443, learning rate 0.0000762 \nStep: 300/865, accuracy0.281, loss2.446, learning rate 0.0000761 \nStep: 400/865, accuracy0.422, loss2.407, learning rate 0.0000760 \nStep: 500/865, accuracy0.281, loss2.474, learning rate 0.0000760 \nStep: 600/865, accuracy0.312, loss2.352, learning rate 0.0000759 \nStep: 700/865, accuracy0.328, loss2.584, learning rate 0.0000759 \nStep: 800/865, accuracy0.359, loss2.203, learning rate 0.0000758 \nStep: 864/865, accuracy0.355, loss2.258, learning rate 0.0000758 \nEpoch: 141/200, accuracy0.329, loss2.443, learning rate 0.000\nEstimated reamining runtime: 5:53:05.349331\n--Validation--\nValidation : Accuracy: 0.470, Loss: 1.851\n=== Epoch: 142 ===\nStep: 0/865, accuracy0.266, loss2.510, learning rate 0.0000758 \nStep: 100/865, accuracy0.328, loss2.450, learning rate 0.0000757 \nStep: 200/865, accuracy0.203, loss2.699, learning rate 0.0000757 \nStep: 300/865, accuracy0.312, loss2.655, learning rate 0.0000756 \nStep: 400/865, accuracy0.234, loss2.698, learning rate 0.0000755 \nStep: 500/865, accuracy0.344, loss2.428, learning rate 0.0000755 \nStep: 600/865, accuracy0.453, loss2.061, learning rate 0.0000754 \nStep: 700/865, accuracy0.281, loss2.637, learning rate 0.0000754 \nStep: 800/865, accuracy0.312, loss2.402, learning rate 0.0000753 \nStep: 864/865, accuracy0.452, loss2.085, learning rate 0.0000753 \nEpoch: 142/200, accuracy0.327, loss2.441, learning rate 0.000\nEstimated reamining runtime: 5:53:02.787209\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.475, Loss: 1.834\n=== Epoch: 143 ===\nStep: 0/865, accuracy0.250, loss2.642, learning rate 0.0000753 \nStep: 100/865, accuracy0.516, loss2.067, learning rate 0.0000752 \nStep: 200/865, accuracy0.281, loss2.299, learning rate 0.0000752 \nStep: 300/865, accuracy0.328, loss2.328, learning rate 0.0000751 \nStep: 400/865, accuracy0.344, loss2.347, learning rate 0.0000751 \nStep: 500/865, accuracy0.312, loss2.461, learning rate 0.0000750 \nStep: 600/865, accuracy0.266, loss2.531, learning rate 0.0000749 \nStep: 700/865, accuracy0.328, loss2.462, learning rate 0.0000749 \nStep: 800/865, accuracy0.328, loss2.563, learning rate 0.0000748 \nStep: 864/865, accuracy0.323, loss2.276, learning rate 0.0000748 \nEpoch: 143/200, accuracy0.329, loss2.435, learning rate 0.000\nEstimated reamining runtime: 5:53:00.909537\n--Validation--\nValidation : Accuracy: 0.478, Loss: 1.836\n=== Epoch: 144 ===\nStep: 0/865, accuracy0.359, loss2.430, learning rate 0.0000748 \nStep: 100/865, accuracy0.359, loss2.204, learning rate 0.0000747 \nStep: 200/865, accuracy0.266, loss2.552, learning rate 0.0000747 \nStep: 300/865, accuracy0.297, loss2.522, learning rate 0.0000746 \nStep: 400/865, accuracy0.281, loss2.591, learning rate 0.0000746 \nStep: 500/865, accuracy0.344, loss2.438, learning rate 0.0000745 \nStep: 600/865, accuracy0.344, loss2.427, learning rate 0.0000745 \nStep: 700/865, accuracy0.375, loss1.999, learning rate 0.0000744 \nStep: 800/865, accuracy0.391, loss2.351, learning rate 0.0000744 \nStep: 864/865, accuracy0.290, loss2.494, learning rate 0.0000743 \nEpoch: 144/200, accuracy0.328, loss2.439, learning rate 0.000\nEstimated reamining runtime: 5:52:59.552453\n--Validation--\nValidation : Accuracy: 0.475, Loss: 1.840\n=== Epoch: 145 ===\nStep: 0/865, accuracy0.359, loss2.145, learning rate 0.0000743 \nStep: 100/865, accuracy0.328, loss2.502, 
learning rate 0.0000743 \nStep: 200/865, accuracy0.359, loss2.363, learning rate 0.0000742 \nStep: 300/865, accuracy0.250, loss2.755, learning rate 0.0000742 \nStep: 400/865, accuracy0.375, loss2.163, learning rate 0.0000741 \nStep: 500/865, accuracy0.312, loss2.427, learning rate 0.0000740 \nStep: 600/865, accuracy0.281, loss2.560, learning rate 0.0000740 \nStep: 700/865, accuracy0.391, loss2.499, learning rate 0.0000739 \nStep: 800/865, accuracy0.250, loss2.611, learning rate 0.0000739 \nStep: 864/865, accuracy0.290, loss2.504, learning rate 0.0000738 \nEpoch: 145/200, accuracy0.329, loss2.434, learning rate 0.000\nEstimated reamining runtime: 5:52:57.260776\n--Validation--\nValidation : Accuracy: 0.473, Loss: 1.835\n=== Epoch: 146 ===\nStep: 0/865, accuracy0.203, loss2.741, learning rate 0.0000738 \nStep: 100/865, accuracy0.344, loss2.655, learning rate 0.0000738 \nStep: 200/865, accuracy0.266, loss2.596, learning rate 0.0000737 \nStep: 300/865, accuracy0.406, loss2.234, learning rate 0.0000737 \nStep: 400/865, accuracy0.312, loss2.580, learning rate 0.0000736 \nStep: 500/865, accuracy0.328, loss2.540, learning rate 0.0000736 \nStep: 600/865, accuracy0.359, loss2.196, learning rate 0.0000735 \nStep: 700/865, accuracy0.297, loss2.509, learning rate 0.0000735 \nStep: 800/865, accuracy0.312, loss2.383, learning rate 0.0000734 \nStep: 864/865, accuracy0.290, loss2.681, learning rate 0.0000734 \nEpoch: 146/200, accuracy0.329, loss2.427, learning rate 0.000\nEstimated reamining runtime: 5:52:54.262737\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.479, Loss: 1.833\n=== Epoch: 147 ===\nStep: 0/865, accuracy0.297, loss2.553, learning rate 0.0000734 \nStep: 100/865, accuracy0.344, loss2.263, learning rate 0.0000733 \nStep: 200/865, accuracy0.312, loss2.509, learning rate 0.0000733 \nStep: 300/865, accuracy0.359, loss2.578, learning rate 0.0000732 \nStep: 400/865, accuracy0.344, loss2.536, learning rate 0.0000732 \nStep: 500/865, accuracy0.406, loss2.182, learning rate 0.0000731 \nStep: 600/865, accuracy0.328, loss2.340, learning rate 0.0000731 \nStep: 700/865, accuracy0.406, loss2.369, learning rate 0.0000730 \nStep: 800/865, accuracy0.391, loss2.492, learning rate 0.0000729 \nStep: 864/865, accuracy0.226, loss2.665, learning rate 0.0000729 \nEpoch: 147/200, accuracy0.330, loss2.430, learning rate 0.000\nEstimated reamining runtime: 5:52:51.732918\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.486, Loss: 1.826\n=== Epoch: 148 ===\nStep: 0/865, accuracy0.359, loss2.326, learning rate 0.0000729 \nStep: 100/865, accuracy0.438, loss2.243, learning rate 0.0000729 \nStep: 200/865, accuracy0.344, loss2.431, learning rate 0.0000728 \nStep: 300/865, accuracy0.359, loss2.270, learning rate 0.0000728 \nStep: 400/865, accuracy0.281, loss2.312, learning rate 0.0000727 \nStep: 500/865, accuracy0.297, loss2.707, learning rate 0.0000726 \nStep: 600/865, accuracy0.250, loss2.647, learning rate 0.0000726 \nStep: 700/865, accuracy0.297, loss2.634, learning rate 0.0000725 \nStep: 800/865, accuracy0.266, loss2.957, learning rate 0.0000725 \nStep: 864/865, accuracy0.452, loss2.309, learning rate 0.0000725 \nEpoch: 148/200, accuracy0.332, loss2.429, learning rate 0.000\nEstimated reamining runtime: 5:52:49.579634\n--Validation--\nValidation : Accuracy: 0.478, Loss: 1.836\n=== Epoch: 149 ===\nStep: 0/865, accuracy0.203, loss2.610, learning rate 0.0000725 \nStep: 100/865, accuracy0.281, loss2.674, learning rate 0.0000724 \nStep: 200/865, accuracy0.375, loss2.400, learning rate 0.0000723 \nStep: 300/865, accuracy0.312, loss2.358, learning rate 0.0000723 \nStep: 400/865, accuracy0.219, loss2.685, learning rate 0.0000722 \nStep: 500/865, accuracy0.328, loss2.219, learning rate 0.0000722 \nStep: 600/865, accuracy0.266, loss2.687, learning rate 0.0000721 \nStep: 700/865, accuracy0.422, loss2.148, learning rate 0.0000721 \nStep: 800/865, accuracy0.328, loss2.203, learning rate 0.0000720 \nStep: 864/865, accuracy0.484, loss2.081, learning rate 0.0000720 \nEpoch: 149/200, accuracy0.330, loss2.434, learning rate 0.000\nEstimated reamining runtime: 5:52:48.053632\n--Validation--\nValidation : Accuracy: 0.476, Loss: 1.839\n=== Epoch: 150 ===\nStep: 0/865, accuracy0.391, loss2.352, learning rate 0.0000720 \nStep: 100/865, accuracy0.281, loss2.397, learning rate 0.0000720 \nStep: 200/865, accuracy0.344, loss2.397, learning rate 0.0000719 \nStep: 300/865, accuracy0.438, loss2.241, learning rate 0.0000718 \nStep: 400/865, accuracy0.297, loss2.465, learning rate 0.0000718 \nStep: 500/865, accuracy0.312, loss2.408, learning rate 0.0000717 \nStep: 600/865, accuracy0.188, loss2.861, learning rate 0.0000717 \nStep: 700/865, accuracy0.344, loss2.649, learning rate 0.0000716 \nStep: 800/865, accuracy0.328, loss2.551, learning rate 0.0000716 \nStep: 864/865, accuracy0.323, loss2.694, learning rate 0.0000716 \nEpoch: 150/200, accuracy0.331, loss2.430, learning rate 0.000\nEstimated reamining runtime: 5:52:45.533357\n--Validation--\nValidation : Accuracy: 0.480, Loss: 1.844\n=== Epoch: 151 ===\nStep: 0/865, accuracy0.188, loss2.478, learning rate 0.0000716 \nStep: 100/865, accuracy0.266, loss2.699, learning rate 0.0000715 \nStep: 200/865, accuracy0.297, loss2.338, learning rate 0.0000715 \nStep: 300/865, accuracy0.344, loss2.286, learning rate 0.0000714 \nStep: 400/865, accuracy0.422, loss2.335, learning rate 0.0000714 \nStep: 500/865, accuracy0.312, loss2.307, learning rate 0.0000713 \nStep: 600/865, accuracy0.312, loss2.639, learning rate 0.0000713 \nStep: 700/865, accuracy0.266, loss2.499, learning rate 0.0000712 \nStep: 800/865, accuracy0.281, loss2.580, learning rate 0.0000711 \nStep: 864/865, accuracy0.484, loss1.852, learning rate 0.0000711 \nEpoch: 151/200, accuracy0.328, loss2.428, learning rate 0.000\nEstimated reamining runtime: 5:52:40.960648\n--Validation--\nValidation : Accuracy: 0.477, Loss: 1.833\n=== Epoch: 152 ===\nStep: 0/865, accuracy0.234, loss2.626, learning rate 0.0000711 
\nStep: 100/865, accuracy0.297, loss2.671, learning rate 0.0000711 \nStep: 200/865, accuracy0.312, loss2.515, learning rate 0.0000710 \nStep: 300/865, accuracy0.359, loss2.248, learning rate 0.0000710 \nStep: 400/865, accuracy0.359, loss2.350, learning rate 0.0000709 \nStep: 500/865, accuracy0.312, loss2.558, learning rate 0.0000709 \nStep: 600/865, accuracy0.312, loss2.583, learning rate 0.0000708 \nStep: 700/865, accuracy0.469, loss2.092, learning rate 0.0000708 \nStep: 800/865, accuracy0.359, loss2.121, learning rate 0.0000707 \nStep: 864/865, accuracy0.355, loss2.279, learning rate 0.0000707 \nEpoch: 152/200, accuracy0.330, loss2.432, learning rate 0.000\nEstimated reamining runtime: 5:52:36.545881\n--Validation--\nValidation : Accuracy: 0.477, Loss: 1.836\n=== Epoch: 153 ===\nStep: 0/865, accuracy0.297, loss2.387, learning rate 0.0000707 \nStep: 100/865, accuracy0.297, loss2.421, learning rate 0.0000706 \nStep: 200/865, accuracy0.391, loss2.424, learning rate 0.0000706 \nStep: 300/865, accuracy0.281, loss2.679, learning rate 0.0000705 \nStep: 400/865, accuracy0.312, loss2.460, learning rate 0.0000705 \nStep: 500/865, accuracy0.328, loss2.360, learning rate 0.0000704 \nStep: 600/865, accuracy0.281, loss2.610, learning rate 0.0000704 \nStep: 700/865, accuracy0.312, loss2.426, learning rate 0.0000703 \nStep: 800/865, accuracy0.359, loss2.341, learning rate 0.0000703 \nStep: 864/865, accuracy0.355, loss2.052, learning rate 0.0000703 \nEpoch: 153/200, accuracy0.332, loss2.424, learning rate 0.000\nEstimated reamining runtime: 5:52:32.112547\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.478, Loss: 1.821\n=== Epoch: 154 ===\nStep: 0/865, accuracy0.359, loss2.202, learning rate 0.0000703 \nStep: 100/865, accuracy0.328, loss2.360, learning rate 0.0000702 \nStep: 200/865, accuracy0.297, loss2.348, learning rate 0.0000702 \nStep: 300/865, accuracy0.234, loss2.460, learning rate 0.0000701 \nStep: 400/865, accuracy0.297, loss2.417, learning rate 0.0000701 \nStep: 500/865, accuracy0.328, loss2.358, learning rate 0.0000700 \nStep: 600/865, accuracy0.281, loss2.525, learning rate 0.0000700 \nStep: 700/865, accuracy0.328, loss2.350, learning rate 0.0000699 \nStep: 800/865, accuracy0.203, loss2.458, learning rate 0.0000699 \nStep: 864/865, accuracy0.387, loss2.218, learning rate 0.0000698 \nEpoch: 154/200, accuracy0.332, loss2.425, learning rate 0.000\nEstimated reamining runtime: 5:52:27.359873\n--Validation--\nValidation : Accuracy: 0.474, Loss: 1.840\n=== Epoch: 155 ===\nStep: 0/865, accuracy0.312, loss2.350, learning rate 0.0000698 \nStep: 100/865, accuracy0.328, loss2.708, learning rate 0.0000698 \nStep: 200/865, accuracy0.359, loss2.393, learning rate 0.0000697 \nStep: 300/865, accuracy0.453, loss2.077, learning rate 0.0000697 \nStep: 400/865, accuracy0.250, loss2.702, learning rate 0.0000696 \nStep: 500/865, accuracy0.328, loss2.662, learning rate 0.0000696 \nStep: 600/865, accuracy0.312, loss2.769, learning rate 0.0000695 \nStep: 700/865, accuracy0.375, loss2.191, learning rate 0.0000695 \nStep: 800/865, accuracy0.359, loss2.314, learning rate 0.0000694 \nStep: 864/865, accuracy0.452, loss2.361, learning rate 0.0000694 \nEpoch: 155/200, accuracy0.332, loss2.427, learning rate 0.000\nEstimated reamining runtime: 5:52:22.705845\n--Validation--\nValidation : Accuracy: 0.476, Loss: 1.828\n=== Epoch: 156 ===\nStep: 0/865, accuracy0.359, loss2.467, learning rate 0.0000694 \nStep: 100/865, accuracy0.281, loss2.564, learning rate 0.0000694 \nStep: 200/865, accuracy0.250, 
loss2.559, learning rate 0.0000693 \nStep: 300/865, accuracy0.281, loss2.807, learning rate 0.0000693 \nStep: 400/865, accuracy0.281, loss2.393, learning rate 0.0000692 \nStep: 500/865, accuracy0.406, loss2.163, learning rate 0.0000692 \nStep: 600/865, accuracy0.375, loss2.194, learning rate 0.0000691 \nStep: 700/865, accuracy0.297, loss2.331, learning rate 0.0000691 \nStep: 800/865, accuracy0.359, loss2.458, learning rate 0.0000690 \nStep: 864/865, accuracy0.387, loss2.423, learning rate 0.0000690 \nEpoch: 156/200, accuracy0.335, loss2.419, learning rate 0.000\nEstimated reamining runtime: 5:52:17.541214\n--Validation--\nValidation : Accuracy: 0.476, Loss: 1.824\n=== Epoch: 157 ===\nStep: 0/865, accuracy0.250, loss2.753, learning rate 0.0000690 \nStep: 100/865, accuracy0.250, loss2.837, learning rate 0.0000689 \nStep: 200/865, accuracy0.375, loss2.258, learning rate 0.0000689 \nStep: 300/865, accuracy0.312, loss2.424, learning rate 0.0000689 \nStep: 400/865, accuracy0.375, loss2.149, learning rate 0.0000688 \nStep: 500/865, accuracy0.281, loss2.524, learning rate 0.0000688 \nStep: 600/865, accuracy0.281, loss2.693, learning rate 0.0000687 \nStep: 700/865, accuracy0.422, loss2.204, learning rate 0.0000687 \nStep: 800/865, accuracy0.344, loss2.452, learning rate 0.0000686 \nStep: 864/865, accuracy0.419, loss2.228, learning rate 0.0000686 \nEpoch: 157/200, accuracy0.332, loss2.424, learning rate 0.000\nEstimated reamining runtime: 5:52:12.310242\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.482, Loss: 1.815\n=== Epoch: 158 ===\nStep: 0/865, accuracy0.250, loss2.605, learning rate 0.0000686 \nStep: 100/865, accuracy0.297, loss2.476, learning rate 0.0000685 \nStep: 200/865, accuracy0.375, loss2.223, learning rate 0.0000685 \nStep: 300/865, accuracy0.391, loss2.344, learning rate 0.0000684 \nStep: 400/865, accuracy0.406, loss2.224, learning rate 0.0000684 \nStep: 500/865, accuracy0.375, loss2.495, learning rate 0.0000684 \nStep: 600/865, accuracy0.219, loss2.674, learning rate 0.0000683 \nStep: 700/865, accuracy0.328, loss2.432, learning rate 0.0000683 \nStep: 800/865, accuracy0.406, loss2.359, learning rate 0.0000682 \nStep: 864/865, accuracy0.194, loss2.755, learning rate 0.0000682 \nEpoch: 158/200, accuracy0.334, loss2.422, learning rate 0.000\nEstimated reamining runtime: 5:52:07.569670\n--Validation--\nValidation : Accuracy: 0.479, Loss: 1.829\n=== Epoch: 159 ===\nStep: 0/865, accuracy0.422, loss2.297, learning rate 0.0000682 \nStep: 100/865, accuracy0.344, loss2.520, learning rate 0.0000681 \nStep: 200/865, accuracy0.312, loss2.371, learning rate 0.0000681 \nStep: 300/865, accuracy0.312, loss2.468, learning rate 0.0000680 \nStep: 400/865, accuracy0.438, loss1.947, learning rate 0.0000680 \nStep: 500/865, accuracy0.328, loss2.327, learning rate 0.0000679 \nStep: 600/865, accuracy0.359, loss2.326, learning rate 0.0000679 \nStep: 700/865, accuracy0.391, loss2.490, learning rate 0.0000679 \nStep: 800/865, accuracy0.344, loss2.332, learning rate 0.0000678 \nStep: 864/865, accuracy0.355, loss2.285, learning rate 0.0000678 \nEpoch: 159/200, accuracy0.331, loss2.425, learning rate 0.000\nEstimated reamining runtime: 5:52:03.940004\n--Validation--\nValidation : Accuracy: 0.482, Loss: 1.828\n=== Epoch: 160 ===\nStep: 0/865, accuracy0.297, loss2.741, learning rate 0.0000678 \nStep: 100/865, accuracy0.250, loss2.413, learning rate 0.0000677 \nStep: 200/865, accuracy0.234, loss2.711, learning rate 0.0000677 \nStep: 300/865, accuracy0.391, loss2.318, learning rate 0.0000676 
\nStep: 400/865, accuracy0.312, loss2.519, learning rate 0.0000676 \nStep: 500/865, accuracy0.359, loss2.326, learning rate 0.0000676 \nStep: 600/865, accuracy0.359, loss2.351, learning rate 0.0000675 \nStep: 700/865, accuracy0.281, loss2.603, learning rate 0.0000675 \nStep: 800/865, accuracy0.359, loss2.537, learning rate 0.0000674 \nStep: 864/865, accuracy0.387, loss2.520, learning rate 0.0000674 \nEpoch: 160/200, accuracy0.332, loss2.423, learning rate 0.000\nEstimated reamining runtime: 5:51:59.106568\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.485, Loss: 1.810\n=== Epoch: 161 ===\nStep: 0/865, accuracy0.266, loss2.560, learning rate 0.0000674 \nStep: 100/865, accuracy0.422, loss2.319, learning rate 0.0000673 \nStep: 200/865, accuracy0.312, loss2.548, learning rate 0.0000673 \nStep: 300/865, accuracy0.406, loss2.133, learning rate 0.0000672 \nStep: 400/865, accuracy0.266, loss2.595, learning rate 0.0000672 \nStep: 500/865, accuracy0.406, loss2.170, learning rate 0.0000672 \nStep: 600/865, accuracy0.375, loss2.259, learning rate 0.0000671 \nStep: 700/865, accuracy0.359, loss2.256, learning rate 0.0000671 \nStep: 800/865, accuracy0.312, loss2.514, learning rate 0.0000670 \nStep: 864/865, accuracy0.452, loss1.935, learning rate 0.0000670 \nEpoch: 161/200, accuracy0.333, loss2.423, learning rate 0.000\nEstimated reamining runtime: 5:51:54.604480\n--Validation--\nValidation : Accuracy: 0.482, Loss: 1.821\n=== Epoch: 162 ===\nStep: 0/865, accuracy0.281, loss2.417, learning rate 0.0000670 \nStep: 100/865, accuracy0.312, loss2.429, learning rate 0.0000670 \nStep: 200/865, accuracy0.297, loss2.483, learning rate 0.0000669 \nStep: 300/865, accuracy0.297, loss2.577, learning rate 0.0000669 \nStep: 400/865, accuracy0.312, loss2.493, learning rate 0.0000668 \nStep: 500/865, accuracy0.375, loss2.514, learning rate 0.0000668 \nStep: 600/865, accuracy0.281, loss2.704, learning rate 0.0000667 \nStep: 700/865, accuracy0.344, loss2.349, learning rate 0.0000667 \nStep: 800/865, accuracy0.281, loss2.626, learning rate 0.0000666 \nStep: 864/865, accuracy0.516, loss1.962, learning rate 0.0000666 \nEpoch: 162/200, accuracy0.332, loss2.418, learning rate 0.000\nEstimated reamining runtime: 5:51:52.450609\n--Validation--\nValidation : Accuracy: 0.484, Loss: 1.820\n=== Epoch: 163 ===\nStep: 0/865, accuracy0.391, loss2.285, learning rate 0.0000666 \nStep: 100/865, accuracy0.391, loss2.400, learning rate 0.0000666 \nStep: 200/865, accuracy0.359, loss2.333, learning rate 0.0000665 \nStep: 300/865, accuracy0.297, loss2.517, learning rate 0.0000665 \nStep: 400/865, accuracy0.344, loss2.454, learning rate 0.0000664 \nStep: 500/865, accuracy0.375, loss2.314, learning rate 0.0000664 \nStep: 600/865, accuracy0.391, loss2.407, learning rate 0.0000663 \nStep: 700/865, accuracy0.453, loss2.187, learning rate 0.0000663 \nStep: 800/865, accuracy0.391, loss2.213, learning rate 0.0000663 \nStep: 864/865, accuracy0.226, loss2.952, learning rate 0.0000662 \nEpoch: 163/200, accuracy0.335, loss2.415, learning rate 0.000\nEstimated reamining runtime: 5:51:50.941773\n--Validation--\nValidation : Accuracy: 0.479, Loss: 1.824\n=== Epoch: 164 ===\nStep: 0/865, accuracy0.312, loss2.448, learning rate 0.0000662 \nStep: 100/865, accuracy0.328, loss2.388, learning rate 0.0000662 \nStep: 200/865, accuracy0.406, loss2.419, learning rate 0.0000661 \nStep: 300/865, accuracy0.219, loss2.969, learning rate 0.0000661 \nStep: 400/865, accuracy0.344, loss2.427, learning rate 0.0000661 \nStep: 500/865, accuracy0.328, 
loss2.416, learning rate 0.0000660 \nStep: 600/865, accuracy0.266, loss2.674, learning rate 0.0000660 \nStep: 700/865, accuracy0.406, loss2.156, learning rate 0.0000659 \nStep: 800/865, accuracy0.266, loss2.785, learning rate 0.0000659 \nStep: 864/865, accuracy0.419, loss2.172, learning rate 0.0000659 \nEpoch: 164/200, accuracy0.335, loss2.413, learning rate 0.000\nEstimated reamining runtime: 5:51:49.594232\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.486, Loss: 1.805\n=== Epoch: 165 ===\nStep: 0/865, accuracy0.297, loss2.475, learning rate 0.0000659 \nStep: 100/865, accuracy0.375, loss2.603, learning rate 0.0000658 \nStep: 200/865, accuracy0.391, loss2.198, learning rate 0.0000658 \nStep: 300/865, accuracy0.297, loss2.440, learning rate 0.0000657 \nStep: 400/865, accuracy0.359, loss2.498, learning rate 0.0000657 \nStep: 500/865, accuracy0.375, loss2.198, learning rate 0.0000656 \nStep: 600/865, accuracy0.391, loss2.221, learning rate 0.0000656 \nStep: 700/865, accuracy0.375, loss2.291, learning rate 0.0000655 \nStep: 800/865, accuracy0.375, loss2.209, learning rate 0.0000655 \nStep: 864/865, accuracy0.387, loss2.445, learning rate 0.0000655 \nEpoch: 165/200, accuracy0.331, loss2.419, learning rate 0.000\nEstimated reamining runtime: 5:51:48.612394\n--Validation--\nValidation : Accuracy: 0.484, Loss: 1.820\n=== Epoch: 166 ===\nStep: 0/865, accuracy0.328, loss2.334, learning rate 0.0000655 \nStep: 100/865, accuracy0.328, loss2.615, learning rate 0.0000654 \nStep: 200/865, accuracy0.406, loss2.279, learning rate 0.0000654 \nStep: 300/865, accuracy0.266, loss2.532, learning rate 0.0000653 \nStep: 400/865, accuracy0.250, loss2.492, learning rate 0.0000653 \nStep: 500/865, accuracy0.344, loss2.566, learning rate 0.0000653 \nStep: 600/865, accuracy0.375, loss2.541, learning rate 0.0000652 \nStep: 700/865, accuracy0.469, loss2.139, learning rate 0.0000652 \nStep: 800/865, accuracy0.281, loss2.375, learning rate 0.0000651 \nStep: 864/865, accuracy0.258, loss2.445, learning rate 0.0000651 \nEpoch: 166/200, accuracy0.331, loss2.420, learning rate 0.000\nEstimated reamining runtime: 5:51:49.659883\n--Validation--\nValidation : Accuracy: 0.484, Loss: 1.811\n=== Epoch: 167 ===\nStep: 0/865, accuracy0.234, loss2.727, learning rate 0.0000651 \nStep: 100/865, accuracy0.234, loss2.596, learning rate 0.0000651 \nStep: 200/865, accuracy0.250, loss2.534, learning rate 0.0000650 \nStep: 300/865, accuracy0.344, loss2.266, learning rate 0.0000650 \nStep: 400/865, accuracy0.344, loss2.500, learning rate 0.0000649 \nStep: 500/865, accuracy0.406, loss2.329, learning rate 0.0000649 \nStep: 600/865, accuracy0.312, loss2.421, learning rate 0.0000649 \nStep: 700/865, accuracy0.438, loss2.204, learning rate 0.0000648 \nStep: 800/865, accuracy0.219, loss2.649, learning rate 0.0000648 \nStep: 864/865, accuracy0.387, loss2.290, learning rate 0.0000647 \nEpoch: 167/200, accuracy0.333, loss2.417, learning rate 0.000\nEstimated reamining runtime: 5:51:49.207784\n--Validation--\nValidation : Accuracy: 0.485, Loss: 1.810\n=== Epoch: 168 ===\nStep: 0/865, accuracy0.328, loss2.669, learning rate 0.0000647 \nStep: 100/865, accuracy0.375, loss2.351, learning rate 0.0000647 \nStep: 200/865, accuracy0.344, loss2.688, learning rate 0.0000647 \nStep: 300/865, accuracy0.375, loss2.302, learning rate 0.0000646 \nStep: 400/865, accuracy0.266, loss2.466, learning rate 0.0000646 \nStep: 500/865, accuracy0.344, loss2.577, learning rate 0.0000645 \nStep: 600/865, accuracy0.203, loss2.679, learning rate 0.0000645 
\nStep: 700/865, accuracy0.391, loss2.316, learning rate 0.0000645 \nStep: 800/865, accuracy0.406, loss2.247, learning rate 0.0000644 \nStep: 864/865, accuracy0.226, loss2.815, learning rate 0.0000644 \nEpoch: 168/200, accuracy0.335, loss2.415, learning rate 0.000\nEstimated reamining runtime: 5:51:48.064122\n--Validation--\nValidation : Accuracy: 0.483, Loss: 1.808\n=== Epoch: 169 ===\nStep: 0/865, accuracy0.297, loss2.499, learning rate 0.0000644 \nStep: 100/865, accuracy0.375, loss2.183, learning rate 0.0000643 \nStep: 200/865, accuracy0.250, loss2.589, learning rate 0.0000643 \nStep: 300/865, accuracy0.375, loss2.295, learning rate 0.0000643 \nStep: 400/865, accuracy0.250, loss2.698, learning rate 0.0000642 \nStep: 500/865, accuracy0.406, loss2.385, learning rate 0.0000642 \nStep: 600/865, accuracy0.375, loss2.411, learning rate 0.0000641 \nStep: 700/865, accuracy0.312, loss2.608, learning rate 0.0000641 \nStep: 800/865, accuracy0.391, loss2.507, learning rate 0.0000641 \nStep: 864/865, accuracy0.290, loss2.555, learning rate 0.0000640 \nEpoch: 169/200, accuracy0.336, loss2.412, learning rate 0.000\nEstimated reamining runtime: 5:51:46.819507\n--Validation--\nValidation : Accuracy: 0.482, Loss: 1.816\n=== Epoch: 170 ===\nStep: 0/865, accuracy0.359, loss2.455, learning rate 0.0000640 \nStep: 100/865, accuracy0.234, loss2.762, learning rate 0.0000640 \nStep: 200/865, accuracy0.281, loss2.537, learning rate 0.0000639 \nStep: 300/865, accuracy0.375, loss2.255, learning rate 0.0000639 \nStep: 400/865, accuracy0.266, loss2.471, learning rate 0.0000639 \nStep: 500/865, accuracy0.344, loss2.296, learning rate 0.0000638 \nStep: 600/865, accuracy0.219, loss2.767, learning rate 0.0000638 \nStep: 700/865, accuracy0.406, loss2.195, learning rate 0.0000637 \nStep: 800/865, accuracy0.453, loss2.314, learning rate 0.0000637 \nStep: 864/865, accuracy0.484, loss2.215, learning rate 0.0000637 \nEpoch: 170/200, accuracy0.335, loss2.409, learning rate 0.000\nEstimated reamining runtime: 5:51:45.099082\n--Validation--\nValidation : Accuracy: 0.479, Loss: 1.818\n=== Epoch: 171 ===\nStep: 0/865, accuracy0.297, loss2.313, learning rate 0.0000637 \nStep: 100/865, accuracy0.328, loss2.446, learning rate 0.0000636 \nStep: 200/865, accuracy0.312, loss2.416, learning rate 0.0000636 \nStep: 300/865, accuracy0.312, loss2.490, learning rate 0.0000636 \nStep: 400/865, accuracy0.281, loss2.387, learning rate 0.0000635 \nStep: 500/865, accuracy0.328, loss2.460, learning rate 0.0000635 \nStep: 600/865, accuracy0.266, loss2.476, learning rate 0.0000634 \nStep: 700/865, accuracy0.344, loss2.291, learning rate 0.0000634 \nStep: 800/865, accuracy0.344, loss2.473, learning rate 0.0000634 \nStep: 864/865, accuracy0.323, loss2.362, learning rate 0.0000633 \nEpoch: 171/200, accuracy0.334, loss2.411, learning rate 0.000\nEstimated reamining runtime: 5:51:43.422517\n--Validation--\nValidation : Accuracy: 0.485, Loss: 1.809\n=== Epoch: 172 ===\nStep: 0/865, accuracy0.281, loss2.704, learning rate 0.0000633 \nStep: 100/865, accuracy0.406, loss2.310, learning rate 0.0000633 \nStep: 200/865, accuracy0.312, loss2.436, learning rate 0.0000632 \nStep: 300/865, accuracy0.266, loss2.410, learning rate 0.0000632 \nStep: 400/865, accuracy0.250, loss2.667, learning rate 0.0000632 \nStep: 500/865, accuracy0.297, loss2.479, learning rate 0.0000631 \nStep: 600/865, accuracy0.344, loss2.410, learning rate 0.0000631 \nStep: 700/865, accuracy0.391, loss2.312, learning rate 0.0000630 \nStep: 800/865, accuracy0.281, loss2.607, learning rate 0.0000630 
\nStep: 864/865, accuracy0.516, loss2.069, learning rate 0.0000630 \nEpoch: 172/200, accuracy0.335, loss2.417, learning rate 0.000\nEstimated reamining runtime: 5:51:41.906523\n--Validation--\nValidation : Accuracy: 0.478, Loss: 1.816\n=== Epoch: 173 ===\nStep: 0/865, accuracy0.312, loss2.456, learning rate 0.0000630 \nStep: 100/865, accuracy0.469, loss2.146, learning rate 0.0000629 \nStep: 200/865, accuracy0.422, loss2.119, learning rate 0.0000629 \nStep: 300/865, accuracy0.297, loss2.247, learning rate 0.0000629 \nStep: 400/865, accuracy0.422, loss2.470, learning rate 0.0000628 \nStep: 500/865, accuracy0.250, loss2.693, learning rate 0.0000628 \nStep: 600/865, accuracy0.422, loss2.098, learning rate 0.0000627 \nStep: 700/865, accuracy0.453, loss2.038, learning rate 0.0000627 \nStep: 800/865, accuracy0.297, loss2.576, learning rate 0.0000627 \nStep: 864/865, accuracy0.484, loss1.878, learning rate 0.0000626 \nEpoch: 173/200, accuracy0.335, loss2.413, learning rate 0.000\nEstimated reamining runtime: 5:51:40.548718\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.482, Loss: 1.803\n=== Epoch: 174 ===\nStep: 0/865, accuracy0.312, loss2.488, learning rate 0.0000626 \nStep: 100/865, accuracy0.422, loss2.491, learning rate 0.0000626 \nStep: 200/865, accuracy0.297, loss2.532, learning rate 0.0000626 \nStep: 300/865, accuracy0.328, loss2.513, learning rate 0.0000625 \nStep: 400/865, accuracy0.406, loss2.245, learning rate 0.0000625 \nStep: 500/865, accuracy0.391, loss2.473, learning rate 0.0000624 \nStep: 600/865, accuracy0.297, loss2.504, learning rate 0.0000624 \nStep: 700/865, accuracy0.281, loss2.310, learning rate 0.0000624 \nStep: 800/865, accuracy0.297, loss2.666, learning rate 0.0000623 \nStep: 864/865, accuracy0.355, loss2.148, learning rate 0.0000623 \nEpoch: 174/200, accuracy0.336, loss2.415, learning rate 0.000\nEstimated reamining runtime: 5:51:39.255815\n--Validation--\nValidation : Accuracy: 0.482, Loss: 1.819\n=== Epoch: 175 ===\nStep: 0/865, accuracy0.250, loss2.615, learning rate 0.0000623 \nStep: 100/865, accuracy0.375, loss2.504, learning rate 0.0000623 \nStep: 200/865, accuracy0.375, loss2.302, learning rate 0.0000622 \nStep: 300/865, accuracy0.328, loss2.375, learning rate 0.0000622 \nStep: 400/865, accuracy0.422, loss2.420, learning rate 0.0000621 \nStep: 500/865, accuracy0.203, loss2.750, learning rate 0.0000621 \nStep: 600/865, accuracy0.203, loss2.585, learning rate 0.0000621 \nStep: 700/865, accuracy0.391, loss2.361, learning rate 0.0000620 \nStep: 800/865, accuracy0.406, loss2.127, learning rate 0.0000620 \nStep: 864/865, accuracy0.290, loss2.277, learning rate 0.0000620 \nEpoch: 175/200, accuracy0.332, loss2.415, learning rate 0.000\nEstimated reamining runtime: 5:51:37.573055\n--Validation--\nValidation : Accuracy: 0.485, Loss: 1.811\n=== Epoch: 176 ===\nStep: 0/865, accuracy0.297, loss2.391, learning rate 0.0000620 \nStep: 100/865, accuracy0.266, loss2.669, learning rate 0.0000619 \nStep: 200/865, accuracy0.281, loss2.517, learning rate 0.0000619 \nStep: 300/865, accuracy0.281, loss2.220, learning rate 0.0000619 \nStep: 400/865, accuracy0.344, loss2.161, learning rate 0.0000618 \nStep: 500/865, accuracy0.312, loss2.278, learning rate 0.0000618 \nStep: 600/865, accuracy0.328, loss2.357, learning rate 0.0000617 \nStep: 700/865, accuracy0.375, loss2.338, learning rate 0.0000617 \nStep: 800/865, accuracy0.297, loss2.386, learning rate 0.0000617 \nStep: 864/865, accuracy0.548, loss2.180, learning rate 0.0000616 \nEpoch: 176/200, accuracy0.335, 
loss2.403, learning rate 0.000\nEstimated reamining runtime: 5:51:36.073750\n--Validation--\nValidation : Accuracy: 0.484, Loss: 1.810\n=== Epoch: 177 ===\nStep: 0/865, accuracy0.297, loss2.483, learning rate 0.0000616 \nStep: 100/865, accuracy0.344, loss2.278, learning rate 0.0000616 \nStep: 200/865, accuracy0.281, loss2.721, learning rate 0.0000616 \nStep: 300/865, accuracy0.438, loss2.085, learning rate 0.0000615 \nStep: 400/865, accuracy0.344, loss2.522, learning rate 0.0000615 \nStep: 500/865, accuracy0.297, loss2.332, learning rate 0.0000614 \nStep: 600/865, accuracy0.281, loss2.541, learning rate 0.0000614 \nStep: 700/865, accuracy0.281, loss2.513, learning rate 0.0000614 \nStep: 800/865, accuracy0.359, loss2.480, learning rate 0.0000613 \nStep: 864/865, accuracy0.290, loss2.868, learning rate 0.0000613 \nEpoch: 177/200, accuracy0.336, loss2.403, learning rate 0.000\nEstimated reamining runtime: 5:51:34.955956\n--Validation--\nValidation : Accuracy: 0.485, Loss: 1.803\n=== Epoch: 178 ===\nStep: 0/865, accuracy0.219, loss2.733, learning rate 0.0000613 \nStep: 100/865, accuracy0.281, loss2.672, learning rate 0.0000613 \nStep: 200/865, accuracy0.359, loss2.284, learning rate 0.0000612 \nStep: 300/865, accuracy0.328, loss2.304, learning rate 0.0000612 \nStep: 400/865, accuracy0.375, loss2.383, learning rate 0.0000612 \nStep: 500/865, accuracy0.391, loss2.305, learning rate 0.0000611 \nStep: 600/865, accuracy0.438, loss2.332, learning rate 0.0000611 \nStep: 700/865, accuracy0.297, loss2.603, learning rate 0.0000610 \nStep: 800/865, accuracy0.250, loss2.687, learning rate 0.0000610 \nStep: 864/865, accuracy0.258, loss2.737, learning rate 0.0000610 \nEpoch: 178/200, accuracy0.336, loss2.411, learning rate 0.000\nEstimated reamining runtime: 5:51:33.844551\n--Validation--\nValidation : Accuracy: 0.488, Loss: 1.805\n=== Epoch: 179 ===\nStep: 0/865, accuracy0.250, loss2.647, learning rate 0.0000610 \nStep: 100/865, accuracy0.312, loss2.359, learning rate 0.0000609 \nStep: 200/865, accuracy0.406, loss2.169, learning rate 0.0000609 \nStep: 300/865, accuracy0.297, loss2.805, learning rate 0.0000609 \nStep: 400/865, accuracy0.344, loss2.325, learning rate 0.0000608 \nStep: 500/865, accuracy0.312, loss2.277, learning rate 0.0000608 \nStep: 600/865, accuracy0.281, loss2.283, learning rate 0.0000608 \nStep: 700/865, accuracy0.438, loss2.170, learning rate 0.0000607 \nStep: 800/865, accuracy0.312, loss2.579, learning rate 0.0000607 \nStep: 864/865, accuracy0.290, loss2.592, learning rate 0.0000607 \nEpoch: 179/200, accuracy0.334, loss2.411, learning rate 0.000\nEstimated reamining runtime: 5:51:32.768966\n--Validation--\nValidation : Accuracy: 0.484, Loss: 1.804\n=== Epoch: 180 ===\nStep: 0/865, accuracy0.312, loss2.162, learning rate 0.0000607 \nStep: 100/865, accuracy0.312, loss2.424, learning rate 0.0000606 \nStep: 200/865, accuracy0.203, loss2.777, learning rate 0.0000606 \nStep: 300/865, accuracy0.297, loss2.440, learning rate 0.0000606 \nStep: 400/865, accuracy0.359, loss2.200, learning rate 0.0000605 \nStep: 500/865, accuracy0.250, loss2.653, learning rate 0.0000605 \nStep: 600/865, accuracy0.531, loss1.847, learning rate 0.0000604 \nStep: 700/865, accuracy0.266, loss2.378, learning rate 0.0000604 \nStep: 800/865, accuracy0.297, loss2.628, learning rate 0.0000604 \nStep: 864/865, accuracy0.323, loss2.112, learning rate 0.0000604 \nEpoch: 180/200, accuracy0.336, loss2.411, learning rate 0.000\nEstimated reamining runtime: 5:51:32.161888\n--Validation--\nValidation : Accuracy: 0.485, Loss: 
1.807\n=== Epoch: 181 ===\nStep: 0/865, accuracy0.438, loss2.210, learning rate 0.0000604 \nStep: 100/865, accuracy0.266, loss2.858, learning rate 0.0000603 \nStep: 200/865, accuracy0.359, loss2.284, learning rate 0.0000603 \nStep: 300/865, accuracy0.359, loss2.363, learning rate 0.0000602 \nStep: 400/865, accuracy0.406, loss2.268, learning rate 0.0000602 \nStep: 500/865, accuracy0.297, loss2.416, learning rate 0.0000602 \nStep: 600/865, accuracy0.375, loss2.353, learning rate 0.0000601 \nStep: 700/865, accuracy0.250, loss2.896, learning rate 0.0000601 \nStep: 800/865, accuracy0.359, loss2.459, learning rate 0.0000601 \nStep: 864/865, accuracy0.258, loss2.349, learning rate 0.0000600 \nEpoch: 181/200, accuracy0.336, loss2.410, learning rate 0.000\nEstimated reamining runtime: 5:51:31.763188\n--Validation--\nValidation : Accuracy: 0.485, Loss: 1.803\n=== Epoch: 182 ===\nStep: 0/865, accuracy0.250, loss2.672, learning rate 0.0000600 \nStep: 100/865, accuracy0.359, loss2.312, learning rate 0.0000600 \nStep: 200/865, accuracy0.453, loss2.133, learning rate 0.0000600 \nStep: 300/865, accuracy0.359, loss2.389, learning rate 0.0000599 \nStep: 400/865, accuracy0.312, loss2.348, learning rate 0.0000599 \nStep: 500/865, accuracy0.344, loss2.571, learning rate 0.0000599 \nStep: 600/865, accuracy0.250, loss2.420, learning rate 0.0000598 \nStep: 700/865, accuracy0.297, loss2.465, learning rate 0.0000598 \nStep: 800/865, accuracy0.250, loss2.713, learning rate 0.0000597 \nStep: 864/865, accuracy0.290, loss2.451, learning rate 0.0000597 \nEpoch: 182/200, accuracy0.337, loss2.404, learning rate 0.000\nEstimated reamining runtime: 5:51:31.799586\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.484, Loss: 1.802\n=== Epoch: 183 ===\nStep: 0/865, accuracy0.438, loss2.145, learning rate 0.0000597 \nStep: 100/865, accuracy0.344, loss2.308, learning rate 0.0000597 \nStep: 200/865, accuracy0.328, loss2.558, learning rate 0.0000597 \nStep: 300/865, accuracy0.391, loss2.340, learning rate 0.0000596 \nStep: 400/865, accuracy0.438, loss2.080, learning rate 0.0000596 \nStep: 500/865, accuracy0.344, loss2.253, learning rate 0.0000595 \nStep: 600/865, accuracy0.391, loss2.179, learning rate 0.0000595 \nStep: 700/865, accuracy0.359, loss2.399, learning rate 0.0000595 \nStep: 800/865, accuracy0.391, loss2.372, learning rate 0.0000594 \nStep: 864/865, accuracy0.484, loss2.433, learning rate 0.0000594 \nEpoch: 183/200, accuracy0.339, loss2.395, learning rate 0.000\nEstimated reamining runtime: 5:51:32.505925\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.485, Loss: 1.800\n=== Epoch: 184 ===\nStep: 0/865, accuracy0.359, loss2.274, learning rate 0.0000594 \nStep: 100/865, accuracy0.219, loss2.775, learning rate 0.0000594 \nStep: 200/865, accuracy0.297, loss2.586, learning rate 0.0000593 \nStep: 300/865, accuracy0.266, loss2.445, learning rate 0.0000593 \nStep: 400/865, accuracy0.391, loss2.363, learning rate 0.0000593 \nStep: 500/865, accuracy0.328, loss2.337, learning rate 0.0000592 \nStep: 600/865, accuracy0.438, loss2.106, learning rate 0.0000592 \nStep: 700/865, accuracy0.359, loss2.209, learning rate 0.0000592 \nStep: 800/865, accuracy0.359, loss2.194, learning rate 0.0000591 \nStep: 864/865, accuracy0.452, loss2.310, learning rate 0.0000591 \nEpoch: 184/200, accuracy0.337, loss2.400, learning rate 0.000\nEstimated reamining runtime: 5:51:31.985022\n--Validation--\nValidation : Accuracy: 0.485, Loss: 1.813\n=== Epoch: 185 ===\nStep: 0/865, accuracy0.359, loss2.259, learning rate 0.0000591 \nStep: 100/865, accuracy0.250, loss2.571, learning rate 0.0000591 \nStep: 200/865, accuracy0.328, loss2.437, learning rate 0.0000590 \nStep: 300/865, accuracy0.312, loss2.464, learning rate 0.0000590 \nStep: 400/865, accuracy0.266, loss2.564, learning rate 0.0000590 \nStep: 500/865, accuracy0.266, loss2.641, learning rate 0.0000589 \nStep: 600/865, accuracy0.281, loss2.893, learning rate 0.0000589 \nStep: 700/865, accuracy0.406, loss2.214, learning rate 0.0000589 \nStep: 800/865, accuracy0.453, loss2.123, learning rate 0.0000588 \nStep: 864/865, accuracy0.323, loss2.119, learning rate 0.0000588 \nEpoch: 185/200, accuracy0.337, loss2.401, learning rate 0.000\nEstimated reamining runtime: 5:51:30.649241\n--Validation--\nValidation : Accuracy: 0.480, Loss: 1.816\n=== Epoch: 186 ===\nStep: 0/865, accuracy0.469, loss2.405, learning rate 0.0000588 \nStep: 100/865, accuracy0.406, loss2.164, learning rate 0.0000588 \nStep: 200/865, accuracy0.391, loss2.285, learning rate 0.0000587 \nStep: 300/865, accuracy0.391, loss2.330, learning rate 0.0000587 \nStep: 400/865, accuracy0.156, loss2.741, learning rate 0.0000587 \nStep: 500/865, accuracy0.281, loss2.694, learning rate 0.0000586 \nStep: 600/865, accuracy0.422, loss2.294, learning rate 0.0000586 \nStep: 700/865, accuracy0.328, loss2.547, learning rate 0.0000586 \nStep: 800/865, accuracy0.422, loss2.071, learning rate 0.0000585 \nStep: 864/865, accuracy0.323, loss2.401, learning rate 0.0000585 \nEpoch: 186/200, accuracy0.340, loss2.398, learning rate 0.000\nEstimated reamining runtime: 5:51:28.430222\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.485, Loss: 1.795\n=== Epoch: 187 ===\nStep: 0/865, accuracy0.297, loss2.527, learning rate 0.0000585 \nStep: 100/865, accuracy0.422, loss2.233, learning rate 0.0000585 \nStep: 200/865, accuracy0.297, loss2.657, learning rate 0.0000584 \nStep: 300/865, accuracy0.391, loss2.491, learning rate 0.0000584 \nStep: 400/865, accuracy0.312, loss2.418, learning rate 0.0000584 \nStep: 500/865, accuracy0.266, loss2.805, learning rate 0.0000583 \nStep: 600/865, accuracy0.281, loss2.513, learning rate 0.0000583 \nStep: 700/865, accuracy0.391, loss2.269, learning rate 0.0000583 \nStep: 800/865, accuracy0.406, loss2.167, learning rate 0.0000582 \nStep: 864/865, accuracy0.258, loss2.961, learning rate 0.0000582 \nEpoch: 187/200, accuracy0.335, loss2.407, learning rate 0.000\nEstimated reamining runtime: 5:51:24.513879\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.488, Loss: 1.795\n=== Epoch: 188 ===\nStep: 0/865, accuracy0.438, loss2.286, learning rate 0.0000582 \nStep: 100/865, accuracy0.297, loss2.341, learning rate 0.0000582 \nStep: 200/865, accuracy0.359, loss2.445, learning rate 0.0000582 \nStep: 300/865, accuracy0.312, loss2.528, learning rate 0.0000581 \nStep: 400/865, accuracy0.406, loss2.042, learning rate 0.0000581 \nStep: 500/865, accuracy0.344, loss2.445, learning rate 0.0000581 \nStep: 600/865, accuracy0.234, loss2.639, learning rate 0.0000580 \nStep: 700/865, accuracy0.344, loss2.170, learning rate 0.0000580 \nStep: 800/865, accuracy0.281, loss2.469, learning rate 0.0000580 \nStep: 864/865, accuracy0.290, loss2.287, learning rate 0.0000579 \nEpoch: 188/200, accuracy0.338, loss2.397, learning rate 0.000\nEstimated reamining runtime: 5:51:21.258292\n--Validation--\nValidation : Accuracy: 0.489, Loss: 1.796\n=== Epoch: 189 ===\nStep: 0/865, accuracy0.422, loss2.042, learning rate 0.0000579 \nStep: 100/865, accuracy0.219, loss2.690, learning rate 0.0000579 \nStep: 200/865, accuracy0.375, loss2.168, learning rate 0.0000579 \nStep: 300/865, accuracy0.312, loss2.335, learning rate 0.0000578 \nStep: 400/865, accuracy0.328, loss2.550, learning rate 0.0000578 \nStep: 500/865, accuracy0.344, loss2.310, learning rate 0.0000578 \nStep: 600/865, accuracy0.344, loss2.526, learning rate 0.0000577 \nStep: 700/865, accuracy0.359, loss2.479, learning rate 0.0000577 \nStep: 800/865, accuracy0.359, loss2.630, learning rate 0.0000577 \nStep: 864/865, accuracy0.226, loss2.724, learning rate 0.0000576 \nEpoch: 189/200, accuracy0.337, loss2.399, learning rate 0.000\nEstimated reamining runtime: 5:51:18.179101\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.491, Loss: 1.794\n=== Epoch: 190 ===\nStep: 0/865, accuracy0.344, loss2.703, learning rate 0.0000576 \nStep: 100/865, accuracy0.406, loss2.327, learning rate 0.0000576 \nStep: 200/865, accuracy0.375, loss2.190, learning rate 0.0000576 \nStep: 300/865, accuracy0.359, loss2.308, learning rate 0.0000575 \nStep: 400/865, accuracy0.344, loss2.563, learning rate 0.0000575 \nStep: 500/865, accuracy0.266, loss2.404, learning rate 0.0000575 \nStep: 600/865, accuracy0.281, loss2.517, learning rate 0.0000574 \nStep: 700/865, accuracy0.406, loss2.358, learning rate 0.0000574 \nStep: 800/865, accuracy0.344, loss2.388, learning rate 0.0000574 \nStep: 864/865, accuracy0.290, loss2.468, learning rate 0.0000574 \nEpoch: 190/200, accuracy0.337, loss2.399, learning rate 0.000\nEstimated reamining runtime: 5:51:14.722356\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.491, Loss: 1.792\n=== Epoch: 191 ===\nStep: 0/865, accuracy0.359, loss2.201, learning rate 0.0000574 \nStep: 100/865, accuracy0.297, loss2.524, learning rate 0.0000573 \nStep: 200/865, accuracy0.438, loss2.303, learning rate 0.0000573 \nStep: 300/865, accuracy0.281, loss2.601, learning rate 0.0000573 \nStep: 400/865, accuracy0.297, loss2.374, learning rate 0.0000572 \nStep: 500/865, accuracy0.438, loss2.236, learning rate 0.0000572 \nStep: 600/865, accuracy0.297, loss2.475, learning rate 0.0000572 \nStep: 700/865, accuracy0.344, loss2.445, learning rate 0.0000571 \nStep: 800/865, accuracy0.391, loss2.082, learning rate 0.0000571 \nStep: 864/865, accuracy0.290, loss2.307, learning rate 0.0000571 \nEpoch: 191/200, accuracy0.340, loss2.396, learning rate 0.000\nEstimated reamining runtime: 5:51:11.179622\n--Validation--\nValidation : Accuracy: 0.487, Loss: 1.793\n=== Epoch: 192 ===\nStep: 0/865, accuracy0.438, loss2.107, learning rate 0.0000571 \nStep: 100/865, accuracy0.266, loss2.662, learning rate 0.0000570 \nStep: 200/865, accuracy0.266, loss2.535, learning rate 0.0000570 \nStep: 300/865, accuracy0.312, loss2.404, learning rate 0.0000570 \nStep: 400/865, accuracy0.344, loss2.606, learning rate 0.0000569 \nStep: 500/865, accuracy0.344, loss2.381, learning rate 0.0000569 \nStep: 600/865, accuracy0.328, loss2.434, learning rate 0.0000569 \nStep: 700/865, accuracy0.266, loss2.482, learning rate 0.0000568 \nStep: 800/865, accuracy0.234, loss2.664, learning rate 0.0000568 \nStep: 864/865, accuracy0.452, loss2.382, learning rate 0.0000568 \nEpoch: 192/200, accuracy0.339, loss2.398, learning rate 0.000\nEstimated reamining runtime: 5:51:07.792753\n--Validation--\nValidation : Accuracy: 0.489, Loss: 1.799\n=== Epoch: 193 ===\nStep: 0/865, accuracy0.359, loss2.276, learning rate 0.0000568 \nStep: 100/865, accuracy0.375, loss2.326, learning rate 0.0000568 \nStep: 200/865, accuracy0.250, loss2.794, learning rate 0.0000567 \nStep: 300/865, accuracy0.359, loss2.250, learning rate 0.0000567 \nStep: 400/865, accuracy0.469, loss2.129, learning rate 0.0000567 \nStep: 500/865, accuracy0.250, loss2.570, learning rate 0.0000566 \nStep: 600/865, accuracy0.391, loss2.446, learning rate 0.0000566 \nStep: 700/865, accuracy0.391, loss2.093, learning rate 0.0000566 \nStep: 800/865, accuracy0.391, loss2.230, learning rate 0.0000565 \nStep: 864/865, accuracy0.419, loss2.312, learning rate 0.0000565 \nEpoch: 193/200, accuracy0.339, loss2.397, learning rate 0.000\nEstimated reamining runtime: 5:51:04.785648\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.493, Loss: 1.787\n=== Epoch: 194 ===\nStep: 0/865, accuracy0.250, loss2.932, learning rate 0.0000565 \nStep: 100/865, accuracy0.453, loss2.302, learning rate 0.0000565 \nStep: 200/865, accuracy0.312, loss2.401, learning rate 0.0000565 \nStep: 300/865, accuracy0.406, loss2.155, learning rate 0.0000564 \nStep: 400/865, accuracy0.281, loss2.672, learning rate 0.0000564 \nStep: 500/865, accuracy0.281, loss2.451, learning rate 0.0000564 \nStep: 600/865, accuracy0.391, loss2.305, learning rate 0.0000563 \nStep: 700/865, accuracy0.328, loss2.332, learning rate 0.0000563 \nStep: 800/865, accuracy0.312, loss2.295, learning rate 0.0000563 \nStep: 864/865, accuracy0.387, loss2.304, learning rate 0.0000562 \nEpoch: 194/200, accuracy0.342, loss2.392, learning rate 0.000\nEstimated reamining runtime: 5:51:01.428609\n--Validation--\nValidation : Accuracy: 0.489, Loss: 1.795\n=== Epoch: 195 ===\nStep: 0/865, accuracy0.328, loss2.670, learning rate 0.0000562 \nStep: 100/865, accuracy0.266, loss2.580, learning rate 0.0000562 \nStep: 200/865, accuracy0.266, loss2.525, learning rate 0.0000562 \nStep: 300/865, accuracy0.344, loss2.562, learning rate 0.0000561 \nStep: 400/865, accuracy0.375, loss2.273, learning rate 0.0000561 \nStep: 500/865, accuracy0.250, loss2.575, learning rate 0.0000561 \nStep: 600/865, accuracy0.312, loss2.616, learning rate 0.0000561 \nStep: 700/865, accuracy0.312, loss2.481, learning rate 0.0000560 \nStep: 800/865, accuracy0.297, loss2.595, learning rate 0.0000560 \nStep: 864/865, accuracy0.355, loss2.603, learning rate 0.0000560 \nEpoch: 195/200, accuracy0.342, loss2.385, learning rate 0.000\nEstimated reamining runtime: 5:50:58.363516\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.488, Loss: 1.785\n=== Epoch: 196 ===\nStep: 0/865, accuracy0.406, loss2.089, learning rate 0.0000560 \nStep: 100/865, accuracy0.359, loss2.478, learning rate 0.0000559 \nStep: 200/865, accuracy0.312, loss2.432, learning rate 0.0000559 \nStep: 300/865, accuracy0.344, loss2.684, learning rate 0.0000559 \nStep: 400/865, accuracy0.391, loss2.476, learning rate 0.0000558 \nStep: 500/865, accuracy0.359, loss2.333, learning rate 0.0000558 \nStep: 600/865, accuracy0.312, loss2.589, learning rate 0.0000558 \nStep: 700/865, accuracy0.250, loss2.921, learning rate 0.0000557 \nStep: 800/865, accuracy0.344, loss2.360, learning rate 0.0000557 \nStep: 864/865, accuracy0.452, loss2.127, learning rate 0.0000557 \nEpoch: 196/200, accuracy0.341, loss2.400, learning rate 0.000\nEstimated reamining runtime: 5:50:56.281996\n--Validation--\nValidation : Accuracy: 0.489, Loss: 1.789\n=== Epoch: 197 ===\nStep: 0/865, accuracy0.172, loss2.696, learning rate 0.0000557 \nStep: 100/865, accuracy0.391, loss2.267, learning rate 0.0000557 \nStep: 200/865, accuracy0.297, loss2.432, learning rate 0.0000556 \nStep: 300/865, accuracy0.344, loss2.453, learning rate 0.0000556 \nStep: 400/865, accuracy0.297, loss2.737, learning rate 0.0000556 \nStep: 500/865, accuracy0.328, loss2.426, learning rate 0.0000555 \nStep: 600/865, accuracy0.375, loss2.558, learning rate 0.0000555 \nStep: 700/865, accuracy0.281, loss2.361, learning rate 0.0000555 \nStep: 800/865, accuracy0.406, loss2.326, learning rate 0.0000555 \nStep: 864/865, accuracy0.355, loss2.055, learning rate 0.0000554 \nEpoch: 197/200, accuracy0.338, loss2.398, learning rate 0.000\nEstimated reamining runtime: 5:50:54.848873\n--Validation--\nNew best model ... 
saving\nValidation : Accuracy: 0.491, Loss: 1.784\n=== Epoch: 198 ===\nStep: 0/865, accuracy0.281, loss2.610, learning rate 0.0000554 \nStep: 100/865, accuracy0.406, loss2.357, learning rate 0.0000554 \nStep: 200/865, accuracy0.359, loss2.464, learning rate 0.0000554 \nStep: 300/865, accuracy0.312, loss2.618, learning rate 0.0000553 \nStep: 400/865, accuracy0.281, loss2.257, learning rate 0.0000553 \nStep: 500/865, accuracy0.281, loss2.682, learning rate 0.0000553 \nStep: 600/865, accuracy0.375, loss2.103, learning rate 0.0000552 \nStep: 700/865, accuracy0.406, loss2.057, learning rate 0.0000552 \nStep: 800/865, accuracy0.406, loss2.288, learning rate 0.0000552 \nStep: 864/865, accuracy0.355, loss2.160, learning rate 0.0000552 \nEpoch: 198/200, accuracy0.339, loss2.394, learning rate 0.000\nEstimated reamining runtime: 5:50:54.434099\n--Validation--\nNew best model ... saving\nValidation : Accuracy: 0.492, Loss: 1.781\n=== Epoch: 199 ===\nStep: 0/865, accuracy0.391, loss2.270, learning rate 0.0000552 \nStep: 100/865, accuracy0.422, loss1.976, learning rate 0.0000551 \nStep: 200/865, accuracy0.344, loss2.181, learning rate 0.0000551 \nStep: 300/865, accuracy0.375, loss2.765, learning rate 0.0000551 \nStep: 400/865, accuracy0.250, loss2.558, learning rate 0.0000550 \nStep: 500/865, accuracy0.344, loss2.401, learning rate 0.0000550 \nStep: 600/865, accuracy0.438, loss2.017, learning rate 0.0000550 \nStep: 700/865, accuracy0.312, loss2.539, learning rate 0.0000550 \nStep: 800/865, accuracy0.312, loss2.296, learning rate 0.0000549 \nStep: 864/865, accuracy0.323, loss2.393, learning rate 0.0000549 \nEpoch: 199/200, accuracy0.337, loss2.402, learning rate 0.000\nEstimated reamining runtime: 5:50:55.121062\n--Validation--\nValidation : Accuracy: 0.483, Loss: 1.802\n=== Epoch: 200 ===\nStep: 0/865, accuracy0.344, loss2.325, learning rate 0.0000549 \nStep: 100/865, accuracy0.344, loss2.318, learning rate 0.0000549 \nStep: 200/865, accuracy0.453, loss2.397, learning rate 0.0000548 \nStep: 300/865, accuracy0.406, loss2.276, learning rate 0.0000548 \nStep: 400/865, accuracy0.344, loss2.486, learning rate 0.0000548 \nStep: 500/865, accuracy0.484, loss2.019, learning rate 0.0000548 \nStep: 600/865, accuracy0.328, loss2.622, learning rate 0.0000547 \nStep: 700/865, accuracy0.328, loss2.338, learning rate 0.0000547 \nStep: 800/865, accuracy0.391, loss2.251, learning rate 0.0000547 \nStep: 864/865, accuracy0.387, loss2.361, learning rate 0.0000546 \nEpoch: 200/200, accuracy0.340, loss2.393, learning rate 0.000\nEstimated reamining runtime: 5:50:59.377593\n--Validation--\nValidation : Accuracy: 0.491, Loss: 1.783\n"
]
],
[
[
"### Training pytorch Mode",
"_____no_output_____"
]
],
[
[
"#### Net and training function \nclass PlantDiseaseNet(nn.Module):\n def __init__(self, input_size=1024, l1=1024, l2=512, output_size=39, dropout_p=0.5):\n super(PlantDiseaseNet, self).__init__()\n self.fc1 = nn.Linear(input_size, l1)\n self.fc2 = nn.Linear(l1, l2)\n self.fc3 = nn.Linear(l2, output_size)\n self.dropout = nn.Dropout(dropout_p)\n\n def forward(self, x):\n x = x.view(x.shape[0], -1)\n x = F.relu(self.fc1(x))\n x = self.dropout(x)\n x = F.relu(self.fc2(x))\n x = self.dropout(x)\n x = F.log_softmax(self.fc3(x), dim=1)\n return x",
"_____no_output_____"
],
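A quick sanity check, added here as a minimal sketch rather than taken from the original run, can confirm the layer sizes above: flattening a batch of 32x32 single-channel images yields the 1024 input features the default constructor expects, and the forward pass should return log-probabilities over 39 classes.

```python
# Editor's sketch: shape check for PlantDiseaseNet (assumes the class defined above is in scope).
import torch

net = PlantDiseaseNet(input_size=32 * 32)   # 32x32 grayscale images -> 1024 flattened features
dummy = torch.randn(8, 1, 32, 32)           # a batch of 8 fake single-channel images
out = net(dummy)                            # forward pass flattens, applies fc1/fc2/fc3 and log_softmax
print(out.shape)                            # expected: torch.Size([8, 39])
```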
[
"def train(model, train_loader, validation_loader, config, n_epochs=10, stopping_treshold=None):\n\n if torch.cuda.is_available():\n print('CUDA is available! Training on GPU ...')\n model.cuda()\n\n\n # Loss and optimizer setup \n criterion = nn.NLLLoss()\n optimizer = optim.Adam(model.parameters(), lr=config['learning_rate'])\n\n # Setting minimum validation loss to inf\n validation_loss_minimum = np.Inf \n train_loss_history = []\n validation_loss_history = []\n\n for epoch in range(1, n_epochs +1):\n\n training_loss = 0.0\n validation_loss = 0.0\n\n # Training loop\n training_accuracies = []\n for X, y in train_loader:\n \n # Moving data to gpu if using \n if torch.cuda.is_available():\n X, y = X.cuda(), y.cuda()\n \n # clear the gradients of all optimized variables\n optimizer.zero_grad()\n # forward pass: compute predicted outputs by passing inputs to the model\n output = model(X)\n # calculate the batch loss\n loss = criterion(output, y)\n # backward pass: compute gradient of the loss with respect to model parameters\n loss.backward()\n # perform a single optimization step (parameter update)\n optimizer.step()\n # update training loss\n training_loss += loss.item()*X.size(0)\n\n # calculating accuracy\n ps = torch.exp(output)\n top_p, top_class = ps.topk(1, dim=1)\n equals = top_class == y.view(*top_class.shape)\n training_accuracies.append(torch.mean(equals.type(torch.FloatTensor)).item())\n\n # Validation Loop\n with torch.no_grad():\n accuracies = []\n for X, y in validation_loader:\n\n # Moving data to gpu if using \n if torch.cuda.is_available():\n X, y = X.cuda(), y.cuda()\n # forward pass: compute predicted outputs by passing inputs to the model\n output = model(X)\n # calculate the batch loss\n loss = criterion(output, y)\n # update validation loss\n validation_loss += loss.item()*X.size(0)\n\n # calculating accuracy\n ps = torch.exp(output)\n top_p, top_class = ps.topk(1, dim=1)\n equals = top_class == y.view(*top_class.shape)\n accuracies.append(torch.mean(equals.type(torch.FloatTensor)).item())\n \n # Mean loss \n mean_training_loss = training_loss/len(train_loader.sampler)\n mean_validation_loss = validation_loss/len(validation_loader.sampler)\n mean_train_accuracy = sum(training_accuracies)/len(training_accuracies)\n mean_accuracy = sum(accuracies)/len(accuracies)\n train_loss_history.append(mean_training_loss)\n validation_loss_history.append(mean_validation_loss)\n\n # Printing epoch stats\n print(f'Epoch: {epoch}/{n_epochs}, ' +\\\n f'Training Loss: {mean_training_loss:.3f}, '+\\\n f'Train accuracy {mean_train_accuracy:.3f} ' +\\\n f'Validation Loss: {mean_validation_loss:.3f}, '+\\\n f'Validation accuracy {mean_accuracy:.3f}')\n\n # logging with mlflow \n if mlflow.active_run():\n mlflow.log_metric('loss', mean_training_loss, step=epoch)\n mlflow.log_metric('accuracy', mean_train_accuracy, step=epoch)\n mlflow.log_metric('validation_accuracy', mean_accuracy, step=epoch)\n mlflow.log_metric('validation_loss', mean_validation_loss, step=epoch)\n\n # Testing for early stopping\n # Testing for early stopping\n if stopping_treshold:\n if mean_validation_loss < validation_loss_minimum:\n validation_loss_minimum = mean_validation_loss\n print('New minimum validation loss (saving model)')\n save_pth = os.path.join('models',f'{config[\"name\"]}.pt')\n torch.save(model.state_dict(), save_pth)\n elif len([v for v in validation_loss_history[-stopping_treshold:] if v > validation_loss_minimum]) >= stopping_treshold:\n print(f\"Stopping early at epoch: {epoch}/{n_epochs}\")\n 
break",
"_____no_output_____"
]
],
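The early-stopping test in `train()` counts how many of the last `stopping_treshold` validation losses exceed the best loss seen so far and stops once all of them do. A toy illustration of that condition (an editor's addition, not part of the notebook) follows; the parameter spelling is kept consistent with the function above.

```python
# Toy illustration of the early-stopping condition used in train() above.
validation_loss_history = [0.50, 0.48, 0.47, 0.49, 0.50, 0.51]
validation_loss_minimum = 0.47   # best validation loss seen so far
stopping_treshold = 3            # spelling kept consistent with the notebook's parameter name

recent_worse = [v for v in validation_loss_history[-stopping_treshold:] if v > validation_loss_minimum]
print(len(recent_worse) >= stopping_treshold)   # True -> the last 3 epochs were all worse, so training stops
```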
[
[
"### Training Pytorch models",
"_____no_output_____"
]
],
[
[
"# getting configs\n# # Getting Configs \npar = pt_runs[0].data.params\nconfig = {'data_split': par['data_split'],\n 'decay': np.float64(par['decay']),\n 'dropout': np.float64(par['dropout']),\n 'framework': par['framework'],\n 'learning_rate': np.float64(par['learning_rate']),\n 'max_epochs': int(par['max_epochs']),\n 'resolution': int(par['resolution']),\n 'type': par['type'],\n 'name': 'top_pytorch'} ",
"_____no_output_____"
],
[
"# Set up data loaders\ntrain_loader = torch.utils.data.DataLoader(train_data, batch_size=64, shuffle=True)\nvalidation_loader = torch.utils.data.DataLoader(test_data, batch_size=64, shuffle=True)\n\n# Initializing the model\nmdl = PlantDiseaseNet(input_size=config['resolution']**2, dropout_p=config['dropout'])\nprint(\"Starting training on network: \\n\", mdl)\n\nmlflow.set_experiment(\"Plant Leaf Disease\")\nwith mlflow.start_run():\n mlflow.log_params(config)\n tlh, vlh = train(mdl, train_loader, validation_loader, config, n_epochs=config['max_epochs'], stopping_treshold=50)",
"Starting training on network: \n PlantDiseaseNet(\n (fc1): Linear(in_features=1024, out_features=1024, bias=True)\n (fc2): Linear(in_features=1024, out_features=512, bias=True)\n (fc3): Linear(in_features=512, out_features=39, bias=True)\n (dropout): Dropout(p=0.2, inplace=False)\n)\nCUDA is available! Training on GPU ...\nEpoch: 1/200, Training Loss: 3.110, Train accuracy 0.184 Validation Loss: 2.768, Validation accuracy 0.264\nNew minimum validation loss (saving model)\nEpoch: 2/200, Training Loss: 2.916, Train accuracy 0.221 Validation Loss: 2.621, Validation accuracy 0.301\nNew minimum validation loss (saving model)\nEpoch: 3/200, Training Loss: 2.835, Train accuracy 0.240 Validation Loss: 2.465, Validation accuracy 0.324\nNew minimum validation loss (saving model)\nEpoch: 4/200, Training Loss: 2.767, Train accuracy 0.250 Validation Loss: 2.389, Validation accuracy 0.337\nNew minimum validation loss (saving model)\nEpoch: 5/200, Training Loss: 2.745, Train accuracy 0.256 Validation Loss: 2.385, Validation accuracy 0.342\nNew minimum validation loss (saving model)\nEpoch: 6/200, Training Loss: 2.712, Train accuracy 0.262 Validation Loss: 2.330, Validation accuracy 0.356\nNew minimum validation loss (saving model)\nEpoch: 7/200, Training Loss: 2.696, Train accuracy 0.267 Validation Loss: 2.343, Validation accuracy 0.349\nEpoch: 8/200, Training Loss: 2.679, Train accuracy 0.271 Validation Loss: 2.307, Validation accuracy 0.365\nNew minimum validation loss (saving model)\nEpoch: 9/200, Training Loss: 2.659, Train accuracy 0.273 Validation Loss: 2.321, Validation accuracy 0.351\nEpoch: 10/200, Training Loss: 2.653, Train accuracy 0.276 Validation Loss: 2.316, Validation accuracy 0.356\nEpoch: 11/200, Training Loss: 2.644, Train accuracy 0.281 Validation Loss: 2.279, Validation accuracy 0.360\nNew minimum validation loss (saving model)\nEpoch: 12/200, Training Loss: 2.632, Train accuracy 0.283 Validation Loss: 2.234, Validation accuracy 0.390\nNew minimum validation loss (saving model)\nEpoch: 13/200, Training Loss: 2.626, Train accuracy 0.285 Validation Loss: 2.250, Validation accuracy 0.365\nEpoch: 14/200, Training Loss: 2.619, Train accuracy 0.287 Validation Loss: 2.295, Validation accuracy 0.361\nEpoch: 15/200, Training Loss: 2.621, Train accuracy 0.286 Validation Loss: 2.211, Validation accuracy 0.380\nNew minimum validation loss (saving model)\nEpoch: 16/200, Training Loss: 2.601, Train accuracy 0.292 Validation Loss: 2.144, Validation accuracy 0.391\nNew minimum validation loss (saving model)\nEpoch: 17/200, Training Loss: 2.597, Train accuracy 0.291 Validation Loss: 2.198, Validation accuracy 0.382\nEpoch: 18/200, Training Loss: 2.589, Train accuracy 0.292 Validation Loss: 2.170, Validation accuracy 0.400\nEpoch: 19/200, Training Loss: 2.584, Train accuracy 0.296 Validation Loss: 2.200, Validation accuracy 0.374\nEpoch: 20/200, Training Loss: 2.573, Train accuracy 0.295 Validation Loss: 2.163, Validation accuracy 0.382\nEpoch: 21/200, Training Loss: 2.574, Train accuracy 0.297 Validation Loss: 2.150, Validation accuracy 0.394\nEpoch: 22/200, Training Loss: 2.571, Train accuracy 0.300 Validation Loss: 2.171, Validation accuracy 0.394\nEpoch: 23/200, Training Loss: 2.559, Train accuracy 0.299 Validation Loss: 2.122, Validation accuracy 0.402\nNew minimum validation loss (saving model)\nEpoch: 24/200, Training Loss: 2.560, Train accuracy 0.302 Validation Loss: 2.131, Validation accuracy 0.395\nEpoch: 25/200, Training Loss: 2.551, Train accuracy 0.303 Validation Loss: 2.185, Validation 
accuracy 0.390\nEpoch: 26/200, Training Loss: 2.556, Train accuracy 0.299 Validation Loss: 2.125, Validation accuracy 0.395\nEpoch: 27/200, Training Loss: 2.549, Train accuracy 0.304 Validation Loss: 2.118, Validation accuracy 0.412\nNew minimum validation loss (saving model)\nEpoch: 28/200, Training Loss: 2.546, Train accuracy 0.305 Validation Loss: 2.128, Validation accuracy 0.405\nEpoch: 29/200, Training Loss: 2.535, Train accuracy 0.305 Validation Loss: 2.141, Validation accuracy 0.406\nEpoch: 30/200, Training Loss: 2.534, Train accuracy 0.305 Validation Loss: 2.161, Validation accuracy 0.396\nEpoch: 31/200, Training Loss: 2.530, Train accuracy 0.308 Validation Loss: 2.114, Validation accuracy 0.397\nNew minimum validation loss (saving model)\nEpoch: 32/200, Training Loss: 2.518, Train accuracy 0.314 Validation Loss: 2.111, Validation accuracy 0.398\nNew minimum validation loss (saving model)\nEpoch: 33/200, Training Loss: 2.523, Train accuracy 0.308 Validation Loss: 2.099, Validation accuracy 0.398\nNew minimum validation loss (saving model)\nEpoch: 34/200, Training Loss: 2.520, Train accuracy 0.310 Validation Loss: 2.147, Validation accuracy 0.403\nEpoch: 35/200, Training Loss: 2.512, Train accuracy 0.309 Validation Loss: 2.093, Validation accuracy 0.413\nNew minimum validation loss (saving model)\nEpoch: 36/200, Training Loss: 2.517, Train accuracy 0.311 Validation Loss: 2.091, Validation accuracy 0.407\nNew minimum validation loss (saving model)\nEpoch: 37/200, Training Loss: 2.510, Train accuracy 0.313 Validation Loss: 2.160, Validation accuracy 0.382\nEpoch: 38/200, Training Loss: 2.506, Train accuracy 0.314 Validation Loss: 2.121, Validation accuracy 0.400\nEpoch: 39/200, Training Loss: 2.509, Train accuracy 0.310 Validation Loss: 2.116, Validation accuracy 0.408\nEpoch: 40/200, Training Loss: 2.503, Train accuracy 0.314 Validation Loss: 2.076, Validation accuracy 0.416\nNew minimum validation loss (saving model)\nEpoch: 41/200, Training Loss: 2.495, Train accuracy 0.316 Validation Loss: 2.050, Validation accuracy 0.418\nNew minimum validation loss (saving model)\nEpoch: 42/200, Training Loss: 2.491, Train accuracy 0.316 Validation Loss: 2.056, Validation accuracy 0.416\nEpoch: 43/200, Training Loss: 2.491, Train accuracy 0.317 Validation Loss: 2.107, Validation accuracy 0.402\nEpoch: 44/200, Training Loss: 2.496, Train accuracy 0.317 Validation Loss: 2.078, Validation accuracy 0.417\nEpoch: 45/200, Training Loss: 2.489, Train accuracy 0.316 Validation Loss: 2.088, Validation accuracy 0.416\nEpoch: 46/200, Training Loss: 2.490, Train accuracy 0.317 Validation Loss: 2.115, Validation accuracy 0.399\nEpoch: 47/200, Training Loss: 2.481, Train accuracy 0.319 Validation Loss: 2.054, Validation accuracy 0.417\nEpoch: 48/200, Training Loss: 2.476, Train accuracy 0.322 Validation Loss: 2.096, Validation accuracy 0.409\nEpoch: 49/200, Training Loss: 2.475, Train accuracy 0.321 Validation Loss: 2.038, Validation accuracy 0.420\nNew minimum validation loss (saving model)\nEpoch: 50/200, Training Loss: 2.476, Train accuracy 0.320 Validation Loss: 2.004, Validation accuracy 0.421\nNew minimum validation loss (saving model)\nEpoch: 51/200, Training Loss: 2.479, Train accuracy 0.321 Validation Loss: 2.037, Validation accuracy 0.420\nEpoch: 52/200, Training Loss: 2.472, Train accuracy 0.322 Validation Loss: 2.016, Validation accuracy 0.426\nEpoch: 53/200, Training Loss: 2.468, Train accuracy 0.323 Validation Loss: 2.065, Validation accuracy 0.402\nEpoch: 54/200, Training Loss: 2.460, Train 
accuracy 0.323 Validation Loss: 2.034, Validation accuracy 0.420\nEpoch: 55/200, Training Loss: 2.461, Train accuracy 0.324 Validation Loss: 2.028, Validation accuracy 0.422\nEpoch: 56/200, Training Loss: 2.454, Train accuracy 0.326 Validation Loss: 2.036, Validation accuracy 0.413\nEpoch: 57/200, Training Loss: 2.455, Train accuracy 0.326 Validation Loss: 2.082, Validation accuracy 0.409\nEpoch: 58/200, Training Loss: 2.462, Train accuracy 0.326 Validation Loss: 2.082, Validation accuracy 0.412\nEpoch: 59/200, Training Loss: 2.456, Train accuracy 0.326 Validation Loss: 2.041, Validation accuracy 0.416\nEpoch: 60/200, Training Loss: 2.445, Train accuracy 0.330 Validation Loss: 2.058, Validation accuracy 0.419\nEpoch: 61/200, Training Loss: 2.456, Train accuracy 0.326 Validation Loss: 2.040, Validation accuracy 0.426\nEpoch: 62/200, Training Loss: 2.457, Train accuracy 0.325 Validation Loss: 2.046, Validation accuracy 0.414\nEpoch: 63/200, Training Loss: 2.441, Train accuracy 0.329 Validation Loss: 2.075, Validation accuracy 0.412\nEpoch: 64/200, Training Loss: 2.454, Train accuracy 0.327 Validation Loss: 2.040, Validation accuracy 0.417\nEpoch: 65/200, Training Loss: 2.448, Train accuracy 0.327 Validation Loss: 2.002, Validation accuracy 0.424\nNew minimum validation loss (saving model)\nEpoch: 66/200, Training Loss: 2.448, Train accuracy 0.328 Validation Loss: 2.058, Validation accuracy 0.412\nEpoch: 67/200, Training Loss: 2.445, Train accuracy 0.329 Validation Loss: 2.036, Validation accuracy 0.429\nEpoch: 68/200, Training Loss: 2.442, Train accuracy 0.333 Validation Loss: 2.011, Validation accuracy 0.420\nEpoch: 69/200, Training Loss: 2.434, Train accuracy 0.331 Validation Loss: 2.066, Validation accuracy 0.410\nEpoch: 70/200, Training Loss: 2.441, Train accuracy 0.331 Validation Loss: 2.012, Validation accuracy 0.425\nEpoch: 71/200, Training Loss: 2.432, Train accuracy 0.333 Validation Loss: 1.956, Validation accuracy 0.439\nNew minimum validation loss (saving model)\nEpoch: 72/200, Training Loss: 2.434, Train accuracy 0.330 Validation Loss: 2.072, Validation accuracy 0.413\nEpoch: 73/200, Training Loss: 2.430, Train accuracy 0.331 Validation Loss: 2.052, Validation accuracy 0.417\nEpoch: 74/200, Training Loss: 2.431, Train accuracy 0.329 Validation Loss: 2.013, Validation accuracy 0.428\nEpoch: 75/200, Training Loss: 2.431, Train accuracy 0.333 Validation Loss: 1.991, Validation accuracy 0.433\nEpoch: 76/200, Training Loss: 2.431, Train accuracy 0.330 Validation Loss: 1.972, Validation accuracy 0.442\nEpoch: 77/200, Training Loss: 2.427, Train accuracy 0.332 Validation Loss: 2.026, Validation accuracy 0.431\nEpoch: 78/200, Training Loss: 2.432, Train accuracy 0.332 Validation Loss: 2.074, Validation accuracy 0.432\nEpoch: 79/200, Training Loss: 2.427, Train accuracy 0.332 Validation Loss: 2.053, Validation accuracy 0.422\nEpoch: 80/200, Training Loss: 2.416, Train accuracy 0.332 Validation Loss: 1.999, Validation accuracy 0.424\nEpoch: 81/200, Training Loss: 2.414, Train accuracy 0.336 Validation Loss: 2.016, Validation accuracy 0.420\nEpoch: 82/200, Training Loss: 2.424, Train accuracy 0.335 Validation Loss: 2.017, Validation accuracy 0.424\nEpoch: 83/200, Training Loss: 2.429, Train accuracy 0.333 Validation Loss: 2.016, Validation accuracy 0.434\nEpoch: 84/200, Training Loss: 2.418, Train accuracy 0.332 Validation Loss: 1.998, Validation accuracy 0.433\nEpoch: 85/200, Training Loss: 2.414, Train accuracy 0.337 Validation Loss: 1.963, Validation accuracy 0.436\nEpoch: 86/200, 
Training Loss: 2.408, Train accuracy 0.335 Validation Loss: 2.013, Validation accuracy 0.422\nEpoch: 87/200, Training Loss: 2.422, Train accuracy 0.337 Validation Loss: 1.989, Validation accuracy 0.428\nEpoch: 88/200, Training Loss: 2.417, Train accuracy 0.333 Validation Loss: 2.004, Validation accuracy 0.440\nEpoch: 89/200, Training Loss: 2.414, Train accuracy 0.335 Validation Loss: 1.973, Validation accuracy 0.435\nEpoch: 90/200, Training Loss: 2.421, Train accuracy 0.335 Validation Loss: 1.970, Validation accuracy 0.436\nEpoch: 91/200, Training Loss: 2.407, Train accuracy 0.339 Validation Loss: 2.021, Validation accuracy 0.431\nEpoch: 92/200, Training Loss: 2.413, Train accuracy 0.341 Validation Loss: 1.964, Validation accuracy 0.445\nEpoch: 93/200, Training Loss: 2.413, Train accuracy 0.336 Validation Loss: 1.990, Validation accuracy 0.428\nEpoch: 94/200, Training Loss: 2.401, Train accuracy 0.341 Validation Loss: 1.989, Validation accuracy 0.435\nEpoch: 95/200, Training Loss: 2.397, Train accuracy 0.339 Validation Loss: 2.033, Validation accuracy 0.420\nEpoch: 96/200, Training Loss: 2.404, Train accuracy 0.340 Validation Loss: 2.006, Validation accuracy 0.423\nEpoch: 97/200, Training Loss: 2.414, Train accuracy 0.337 Validation Loss: 1.989, Validation accuracy 0.432\nEpoch: 98/200, Training Loss: 2.407, Train accuracy 0.339 Validation Loss: 1.986, Validation accuracy 0.439\nEpoch: 99/200, Training Loss: 2.407, Train accuracy 0.339 Validation Loss: 1.959, Validation accuracy 0.440\nEpoch: 100/200, Training Loss: 2.403, Train accuracy 0.338 Validation Loss: 1.911, Validation accuracy 0.458\nNew minimum validation loss (saving model)\nEpoch: 101/200, Training Loss: 2.406, Train accuracy 0.340 Validation Loss: 1.987, Validation accuracy 0.440\nEpoch: 102/200, Training Loss: 2.401, Train accuracy 0.338 Validation Loss: 2.017, Validation accuracy 0.423\nEpoch: 103/200, Training Loss: 2.408, Train accuracy 0.342 Validation Loss: 1.982, Validation accuracy 0.443\nEpoch: 104/200, Training Loss: 2.392, Train accuracy 0.340 Validation Loss: 1.972, Validation accuracy 0.437\nEpoch: 105/200, Training Loss: 2.393, Train accuracy 0.342 Validation Loss: 1.951, Validation accuracy 0.440\nEpoch: 106/200, Training Loss: 2.394, Train accuracy 0.343 Validation Loss: 1.961, Validation accuracy 0.451\nEpoch: 107/200, Training Loss: 2.391, Train accuracy 0.345 Validation Loss: 1.962, Validation accuracy 0.440\nEpoch: 108/200, Training Loss: 2.396, Train accuracy 0.342 Validation Loss: 1.951, Validation accuracy 0.445\nEpoch: 109/200, Training Loss: 2.394, Train accuracy 0.343 Validation Loss: 1.966, Validation accuracy 0.435\nEpoch: 110/200, Training Loss: 2.400, Train accuracy 0.342 Validation Loss: 1.973, Validation accuracy 0.445\nEpoch: 111/200, Training Loss: 2.394, Train accuracy 0.343 Validation Loss: 1.938, Validation accuracy 0.438\nEpoch: 112/200, Training Loss: 2.393, Train accuracy 0.342 Validation Loss: 1.981, Validation accuracy 0.434\nEpoch: 113/200, Training Loss: 2.380, Train accuracy 0.346 Validation Loss: 1.956, Validation accuracy 0.441\nEpoch: 114/200, Training Loss: 2.385, Train accuracy 0.345 Validation Loss: 1.940, Validation accuracy 0.450\nEpoch: 115/200, Training Loss: 2.391, Train accuracy 0.342 Validation Loss: 1.967, Validation accuracy 0.444\nEpoch: 116/200, Training Loss: 2.381, Train accuracy 0.343 Validation Loss: 1.985, Validation accuracy 0.434\nEpoch: 117/200, Training Loss: 2.395, Train accuracy 0.342 Validation Loss: 2.006, Validation accuracy 0.432\nEpoch: 118/200, 
Training Loss: 2.375, Train accuracy 0.345 Validation Loss: 1.971, Validation accuracy 0.436\nEpoch: 119/200, Training Loss: 2.388, Train accuracy 0.347 Validation Loss: 1.948, Validation accuracy 0.445\nEpoch: 120/200, Training Loss: 2.377, Train accuracy 0.346 Validation Loss: 1.962, Validation accuracy 0.445\nEpoch: 121/200, Training Loss: 2.385, Train accuracy 0.341 Validation Loss: 1.975, Validation accuracy 0.437\nEpoch: 122/200, Training Loss: 2.380, Train accuracy 0.345 Validation Loss: 1.960, Validation accuracy 0.446\nEpoch: 123/200, Training Loss: 2.379, Train accuracy 0.344 Validation Loss: 1.957, Validation accuracy 0.443\nEpoch: 124/200, Training Loss: 2.377, Train accuracy 0.347 Validation Loss: 1.939, Validation accuracy 0.452\nEpoch: 125/200, Training Loss: 2.381, Train accuracy 0.345 Validation Loss: 1.949, Validation accuracy 0.449\nEpoch: 126/200, Training Loss: 2.373, Train accuracy 0.350 Validation Loss: 1.926, Validation accuracy 0.454\nEpoch: 127/200, Training Loss: 2.370, Train accuracy 0.345 Validation Loss: 1.954, Validation accuracy 0.440\nEpoch: 128/200, Training Loss: 2.378, Train accuracy 0.347 Validation Loss: 1.915, Validation accuracy 0.450\nEpoch: 129/200, Training Loss: 2.367, Train accuracy 0.349 Validation Loss: 1.956, Validation accuracy 0.448\nEpoch: 130/200, Training Loss: 2.377, Train accuracy 0.347 Validation Loss: 1.905, Validation accuracy 0.465\nNew minimum validation loss (saving model)\nEpoch: 131/200, Training Loss: 2.380, Train accuracy 0.343 Validation Loss: 1.973, Validation accuracy 0.432\nEpoch: 132/200, Training Loss: 2.374, Train accuracy 0.349 Validation Loss: 1.915, Validation accuracy 0.455\nEpoch: 133/200, Training Loss: 2.371, Train accuracy 0.347 Validation Loss: 1.948, Validation accuracy 0.448\nEpoch: 134/200, Training Loss: 2.378, Train accuracy 0.347 Validation Loss: 1.909, Validation accuracy 0.452\nEpoch: 135/200, Training Loss: 2.364, Train accuracy 0.350 Validation Loss: 1.965, Validation accuracy 0.445\nEpoch: 136/200, Training Loss: 2.365, Train accuracy 0.349 Validation Loss: 1.899, Validation accuracy 0.457\nNew minimum validation loss (saving model)\nEpoch: 137/200, Training Loss: 2.370, Train accuracy 0.349 Validation Loss: 1.930, Validation accuracy 0.438\nEpoch: 138/200, Training Loss: 2.361, Train accuracy 0.351 Validation Loss: 1.931, Validation accuracy 0.448\nEpoch: 139/200, Training Loss: 2.363, Train accuracy 0.349 Validation Loss: 1.901, Validation accuracy 0.456\nEpoch: 140/200, Training Loss: 2.363, Train accuracy 0.350 Validation Loss: 1.962, Validation accuracy 0.448\nEpoch: 141/200, Training Loss: 2.358, Train accuracy 0.350 Validation Loss: 1.954, Validation accuracy 0.446\nEpoch: 142/200, Training Loss: 2.368, Train accuracy 0.349 Validation Loss: 1.922, Validation accuracy 0.449\nEpoch: 143/200, Training Loss: 2.366, Train accuracy 0.347 Validation Loss: 1.933, Validation accuracy 0.444\nEpoch: 144/200, Training Loss: 2.360, Train accuracy 0.350 Validation Loss: 1.899, Validation accuracy 0.454\nNew minimum validation loss (saving model)\nEpoch: 145/200, Training Loss: 2.359, Train accuracy 0.350 Validation Loss: 1.857, Validation accuracy 0.465\nNew minimum validation loss (saving model)\nEpoch: 146/200, Training Loss: 2.367, Train accuracy 0.351 Validation Loss: 1.977, Validation accuracy 0.435\nEpoch: 147/200, Training Loss: 2.351, Train accuracy 0.352 Validation Loss: 1.945, Validation accuracy 0.445\nEpoch: 148/200, Training Loss: 2.364, Train accuracy 0.349 Validation Loss: 1.910, Validation 
accuracy 0.452\nEpoch: 149/200, Training Loss: 2.350, Train accuracy 0.353 Validation Loss: 1.941, Validation accuracy 0.442\nEpoch: 150/200, Training Loss: 2.361, Train accuracy 0.352 Validation Loss: 1.911, Validation accuracy 0.454\nEpoch: 151/200, Training Loss: 2.358, Train accuracy 0.352 Validation Loss: 1.928, Validation accuracy 0.453\nEpoch: 152/200, Training Loss: 2.357, Train accuracy 0.352 Validation Loss: 1.923, Validation accuracy 0.446\nEpoch: 153/200, Training Loss: 2.354, Train accuracy 0.352 Validation Loss: 1.959, Validation accuracy 0.442\nEpoch: 154/200, Training Loss: 2.355, Train accuracy 0.351 Validation Loss: 1.910, Validation accuracy 0.451\nEpoch: 155/200, Training Loss: 2.355, Train accuracy 0.351 Validation Loss: 1.946, Validation accuracy 0.447\nEpoch: 156/200, Training Loss: 2.352, Train accuracy 0.352 Validation Loss: 1.934, Validation accuracy 0.445\nEpoch: 157/200, Training Loss: 2.350, Train accuracy 0.354 Validation Loss: 1.933, Validation accuracy 0.453\nEpoch: 158/200, Training Loss: 2.353, Train accuracy 0.351 Validation Loss: 1.905, Validation accuracy 0.466\nEpoch: 159/200, Training Loss: 2.344, Train accuracy 0.355 Validation Loss: 1.910, Validation accuracy 0.463\nEpoch: 160/200, Training Loss: 2.353, Train accuracy 0.352 Validation Loss: 1.911, Validation accuracy 0.450\nEpoch: 161/200, Training Loss: 2.364, Train accuracy 0.350 Validation Loss: 1.914, Validation accuracy 0.455\nEpoch: 162/200, Training Loss: 2.346, Train accuracy 0.353 Validation Loss: 1.927, Validation accuracy 0.447\nEpoch: 163/200, Training Loss: 2.351, Train accuracy 0.353 Validation Loss: 1.893, Validation accuracy 0.463\nEpoch: 164/200, Training Loss: 2.350, Train accuracy 0.351 Validation Loss: 1.922, Validation accuracy 0.443\nEpoch: 165/200, Training Loss: 2.354, Train accuracy 0.354 Validation Loss: 1.913, Validation accuracy 0.455\nEpoch: 166/200, Training Loss: 2.355, Train accuracy 0.352 Validation Loss: 1.899, Validation accuracy 0.452\nEpoch: 167/200, Training Loss: 2.347, Train accuracy 0.352 Validation Loss: 1.957, Validation accuracy 0.445\nEpoch: 168/200, Training Loss: 2.344, Train accuracy 0.352 Validation Loss: 1.932, Validation accuracy 0.453\nEpoch: 169/200, Training Loss: 2.347, Train accuracy 0.353 Validation Loss: 1.950, Validation accuracy 0.441\nEpoch: 170/200, Training Loss: 2.341, Train accuracy 0.354 Validation Loss: 1.939, Validation accuracy 0.438\nEpoch: 171/200, Training Loss: 2.349, Train accuracy 0.351 Validation Loss: 1.920, Validation accuracy 0.451\nEpoch: 172/200, Training Loss: 2.347, Train accuracy 0.353 Validation Loss: 1.929, Validation accuracy 0.454\nEpoch: 173/200, Training Loss: 2.352, Train accuracy 0.355 Validation Loss: 1.918, Validation accuracy 0.446\nEpoch: 174/200, Training Loss: 2.358, Train accuracy 0.352 Validation Loss: 1.904, Validation accuracy 0.448\nEpoch: 175/200, Training Loss: 2.349, Train accuracy 0.354 Validation Loss: 1.868, Validation accuracy 0.466\nEpoch: 176/200, Training Loss: 2.348, Train accuracy 0.355 Validation Loss: 1.923, Validation accuracy 0.449\nEpoch: 177/200, Training Loss: 2.346, Train accuracy 0.353 Validation Loss: 1.869, Validation accuracy 0.459\nEpoch: 178/200, Training Loss: 2.344, Train accuracy 0.354 Validation Loss: 1.932, Validation accuracy 0.452\nEpoch: 179/200, Training Loss: 2.346, Train accuracy 0.355 Validation Loss: 1.958, Validation accuracy 0.451\nEpoch: 180/200, Training Loss: 2.337, Train accuracy 0.355 Validation Loss: 1.902, Validation accuracy 0.466\nEpoch: 181/200, 
Training Loss: 2.332, Train accuracy 0.355 Validation Loss: 1.920, Validation accuracy 0.447\nEpoch: 182/200, Training Loss: 2.341, Train accuracy 0.355 Validation Loss: 1.873, Validation accuracy 0.454\nEpoch: 183/200, Training Loss: 2.358, Train accuracy 0.352 Validation Loss: 1.884, Validation accuracy 0.462\nEpoch: 184/200, Training Loss: 2.343, Train accuracy 0.356 Validation Loss: 1.919, Validation accuracy 0.444\nEpoch: 185/200, Training Loss: 2.337, Train accuracy 0.356 Validation Loss: 1.867, Validation accuracy 0.464\nEpoch: 186/200, Training Loss: 2.338, Train accuracy 0.354 Validation Loss: 1.897, Validation accuracy 0.456\nEpoch: 187/200, Training Loss: 2.324, Train accuracy 0.359 Validation Loss: 1.900, Validation accuracy 0.461\nEpoch: 188/200, Training Loss: 2.339, Train accuracy 0.354 Validation Loss: 1.937, Validation accuracy 0.449\nEpoch: 189/200, Training Loss: 2.330, Train accuracy 0.356 Validation Loss: 1.895, Validation accuracy 0.454\nEpoch: 190/200, Training Loss: 2.340, Train accuracy 0.354 Validation Loss: 1.868, Validation accuracy 0.463\nEpoch: 191/200, Training Loss: 2.337, Train accuracy 0.356 Validation Loss: 1.943, Validation accuracy 0.446\nEpoch: 192/200, Training Loss: 2.341, Train accuracy 0.355 Validation Loss: 1.920, Validation accuracy 0.461\nEpoch: 193/200, Training Loss: 2.342, Train accuracy 0.353 Validation Loss: 1.887, Validation accuracy 0.457\nEpoch: 194/200, Training Loss: 2.333, Train accuracy 0.362 Validation Loss: 1.898, Validation accuracy 0.457\nEpoch: 195/200, Training Loss: 2.342, Train accuracy 0.357 Validation Loss: 1.898, Validation accuracy 0.452\nStopping early at epoch: 195/200\n"
]
]
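Since `train()` writes the best-performing weights to `models/<name>.pt`, that checkpoint can be reloaded afterwards for evaluation. The snippet below is a minimal sketch of that follow-up step; it assumes `PlantDiseaseNet` and `config` from the cells above are still in scope, and it is not taken from the original run.

```python
# Editor's sketch: reload the best checkpoint saved by train() for evaluation.
import os
import torch

best_model = PlantDiseaseNet(input_size=config['resolution'] ** 2, dropout_p=config['dropout'])
state_dict = torch.load(os.path.join('models', f"{config['name']}.pt"), map_location='cpu')  # load on CPU regardless of training device
best_model.load_state_dict(state_dict)
best_model.eval()   # disable dropout before running validation/test batches
```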
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
]
] |
4a6251c55870d8eb093dd81dc9accdcfcaa4819d
| 798,435 |
ipynb
|
Jupyter Notebook
|
3. Facial Keypoint Detection, Complete Pipeline.ipynb
|
imsaksham-c/FacialLandmarks
|
02889394836e339ad963b0edec74fe2694911a47
|
[
"MIT"
] | null | null | null |
3. Facial Keypoint Detection, Complete Pipeline.ipynb
|
imsaksham-c/FacialLandmarks
|
02889394836e339ad963b0edec74fe2694911a47
|
[
"MIT"
] | 4 |
2021-06-08T22:48:19.000Z
|
2022-03-12T00:30:56.000Z
|
3. Facial Keypoint Detection, Complete Pipeline.ipynb
|
imsaksham-c/FacialLandmarks
|
02889394836e339ad963b0edec74fe2694911a47
|
[
"MIT"
] | null | null | null | 2,327.798834 | 323,020 | 0.961068 |
[
[
[
"## Face and Facial Keypoint detection\n\nAfter you've trained a neural network to detect facial keypoints, you can then apply this network to *any* image that includes faces. The neural network expects a Tensor of a certain size as input and, so, to detect any face, you'll first have to do some pre-processing.\n\n1. Detect all the faces in an image using a face detector (we'll be using a Haar Cascade detector in this notebook).\n2. Pre-process those face images so that they are grayscale, and transformed to a Tensor of the input size that your net expects. This step will be similar to the `data_transform` you created and applied in Notebook 2, whose job was tp rescale, normalize, and turn any iimage into a Tensor to be accepted as input to your CNN.\n3. Use your trained model to detect facial keypoints on the image.\n\n---",
"_____no_output_____"
],
[
"In the next python cell we load in required libraries for this section of the project.",
"_____no_output_____"
]
],
[
[
"import numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib.image as mpimg\n%matplotlib inline",
"_____no_output_____"
]
],
[
[
"#### Select an image \n\nSelect an image to perform facial keypoint detection on; you can select any image of faces in the `images/` directory.",
"_____no_output_____"
]
],
[
[
"import cv2\n# load in color image for face detection\nimage = cv2.imread('images/obamas.jpg')\n\n# switch red and blue color channels \n# --> by default OpenCV assumes BLUE comes first, not RED as in many images\nimage = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n\n# plot the image\nfig = plt.figure(figsize=(9,9))\nplt.imshow(image)",
"_____no_output_____"
]
],
[
[
"## Detect all faces in an image\n\nNext, you'll use one of OpenCV's pre-trained Haar Cascade classifiers, all of which can be found in the `detector_architectures/` directory, to find any faces in your selected image.\n\nIn the code below, we loop over each face in the original image and draw a red square on each face (in a copy of the original image, so as not to modify the original). You can even [add eye detections](https://docs.opencv.org/3.4.1/d7/d8b/tutorial_py_face_detection.html) as an *optional* exercise in using Haar detectors.\n\nAn example of face detection on a variety of images is shown below.\n\n<img src='images/haar_cascade_ex.png' width=80% height=80%/>\n",
"_____no_output_____"
]
],
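The *optional* eye-detection exercise mentioned above works the same way with a second cascade. The sketch below is an editor's illustration only: `haarcascade_eye.xml` ships with OpenCV, but its exact location under this project's `detector_architectures/` folder is an assumption, and the `image` variable is the RGB image loaded in the earlier cell.

```python
# Editor's sketch of the optional eye-detection exercise (path to the eye cascade is assumed).
import cv2
import matplotlib.pyplot as plt

eye_cascade = cv2.CascadeClassifier('detector_architectures/haarcascade_eye.xml')  # assumed path

gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)      # cascades work on single-channel images
eyes = eye_cascade.detectMultiScale(gray, 1.2, 2)

image_with_eyes = image.copy()
for (ex, ey, ew, eh) in eyes:
    cv2.rectangle(image_with_eyes, (ex, ey), (ex + ew, ey + eh), (0, 255, 0), 2)
plt.imshow(image_with_eyes)
```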
[
[
"# load in a haar cascade classifier for detecting frontal faces\nface_cascade = cv2.CascadeClassifier('detector_architectures/haarcascade_frontalface_default.xml')\n\n# run the detector\n# the output here is an array of detections; the corners of each detection box\n# if necessary, modify these parameters until you successfully identify every face in a given image\nfaces = face_cascade.detectMultiScale(image, 1.2, 2)\n\n# make a copy of the original image to plot detections on\nimage_with_detections = image.copy()\n\n# loop over the detected faces, mark the image where each face is found\nfor (x,y,w,h) in faces:\n # draw a rectangle around each detected face\n # you may also need to change the width of the rectangle drawn depending on image resolution\n cv2.rectangle(image_with_detections,(x,y),(x+w,y+h),(255,0,0),3) \n\nfig = plt.figure(figsize=(9,9))\n\nplt.imshow(image_with_detections)",
"_____no_output_____"
]
],
[
[
"## Loading in a trained model\n\nOnce you have an image to work with (and, again, you can select any image of faces in the `images/` directory), the next step is to pre-process that image and feed it into your CNN facial keypoint detector.\n\nFirst, load your best model by its filename.",
"_____no_output_____"
]
],
[
[
"import torch\nfrom models import Net\n\nnet = Net()\n\n## TODO: load the best saved model parameters (by your path name)\n## You'll need to un-comment the line below and add the correct name for *your* saved model\nnet.load_state_dict(torch.load('saved_models/keypoints_model_1.pt'))\n\n## print out your net and prepare it for testing (uncomment the line below)\nnet.eval()",
"_____no_output_____"
]
],
[
[
"## Keypoint detection\n\nNow, we'll loop over each detected face in an image (again!) only this time, you'll transform those faces in Tensors that your CNN can accept as input images.\n\n### TODO: Transform each detected face into an input Tensor\n\nYou'll need to perform the following steps for each detected face:\n1. Convert the face from RGB to grayscale\n2. Normalize the grayscale image so that its color range falls in [0,1] instead of [0,255]\n3. Rescale the detected face to be the expected square size for your CNN (224x224, suggested)\n4. Reshape the numpy image into a torch image.\n\n**Hint**: The sizes of faces detected by a Haar detector and the faces your network has been trained on are of different sizes. If you find that your model is generating keypoints that are too small for a given face, try adding some padding to the detected `roi` before giving it as input to your model.\n\nYou may find it useful to consult to transformation code in `data_load.py` to help you perform these processing steps.\n\n\n### TODO: Detect and display the predicted keypoints\n\nAfter each face has been appropriately converted into an input Tensor for your network to see as input, you can apply your `net` to each face. The ouput should be the predicted the facial keypoints. These keypoints will need to be \"un-normalized\" for display, and you may find it helpful to write a helper function like `show_keypoints`. You should end up with an image like the following with facial keypoints that closely match the facial features on each individual face:\n\n<img src='images/michelle_detected.png' width=30% height=30%/>\n\n\n",
"_____no_output_____"
]
],
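The notebook's own filled-in solution follows in the next cell; as a compact reference, the four steps listed above can also be wrapped in one small helper, as in this editor's sketch (the 224x224 size and the grayscale/[0, 1] scaling mirror the list above and are not taken from the notebook's graded solution).

```python
# Editor's sketch: the four pre-processing steps above wrapped in one helper.
import cv2
import torch

def face_to_tensor(face_rgb, size=224):
    gray = cv2.cvtColor(face_rgb, cv2.COLOR_RGB2GRAY)   # 1. RGB -> grayscale
    gray = gray / 255.0                                  # 2. scale pixel values to [0, 1]
    gray = cv2.resize(gray, (size, size))                # 3. resize to the expected square input
    tensor = torch.from_numpy(gray).float()              # 4. numpy (H x W) -> torch tensor
    return tensor.view(1, 1, size, size)                 #    add batch and channel dimensions
```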
[
[
"image_copy = np.copy(image)\n\n# loop over the detected faces from your haar cascade\nfor (x,y,w,h) in faces:\n \n # Select the region of interest that is the face in the image \n roi = image_copy[y-50:y+h+50, x-50:x+w+50]\n \n ## TODO: Convert the face region from RGB to grayscale\n gray = cv2.cvtColor(roi, cv2.COLOR_RGB2GRAY)\n\n ## TODO: Normalize the grayscale image so that its color range falls in [0,1] instead of [0,255]\n gray = gray/255.0\n \n ## TODO: Rescale the detected face to be the expected square size for your CNN (224x224, suggested)\n gray = cv2.resize(gray, (224,224))\n \n ## TODO: Reshape the numpy image shape (H x W x C) into a torch image shape (C x H x W)\n roi_copy = torch.from_numpy(gray.reshape(1,1,224,224))\n roi_copy = roi_copy.type(torch.FloatTensor)\n \n ## TODO: Make facial keypoint predictions using your loaded, trained network \n output = net(roi_copy)\n\n ## TODO: Display each detected face and the corresponding keypoints \n torch.squeeze(output) \n output = output.view(68, -1)\n predicted = output.data.numpy()\n predicted = predicted*50.0+100\n \n plt.imshow(gray, cmap='gray')\n plt.scatter(predicted[:, 0], predicted[:, 1], s=60, marker='.', c='g')\n \n plt.show()\n \n",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a62607d1b405804b142adc22282f815e805b382
| 100,993 |
ipynb
|
Jupyter Notebook
|
notebooks/save_normalized_wav.ipynb
|
thibault-djaballah/multrigger-word
|
1a60e57057ad5bb78efd6e38bd3efc5f6f26ee3d
|
[
"MIT"
] | null | null | null |
notebooks/save_normalized_wav.ipynb
|
thibault-djaballah/multrigger-word
|
1a60e57057ad5bb78efd6e38bd3efc5f6f26ee3d
|
[
"MIT"
] | null | null | null |
notebooks/save_normalized_wav.ipynb
|
thibault-djaballah/multrigger-word
|
1a60e57057ad5bb78efd6e38bd3efc5f6f26ee3d
|
[
"MIT"
] | null | null | null | 83.534326 | 33,096 | 0.523838 |
[
[
[
"import pydub\nimport glob\nimport os\nimport sys",
"_____no_output_____"
],
[
"os.getcwd()",
"_____no_output_____"
],
[
"sys.path.append(\"/\".join(os.getcwd().split(\"/\")[:-1]))",
"_____no_output_____"
],
[
"from src.utils.audio import match_target_amplitude",
"_____no_output_____"
],
[
"audio_files = glob.glob(\"../data/raw/*/*/*.wav\")\naudio_file = audio_files[19]\naudio_segment = pydub.AudioSegment.from_wav(audio_file)",
"_____no_output_____"
],
[
"match_target_amplitude(audio_segment, -20).export(\"lol.wav\", format=\"wav\")",
"_____no_output_____"
],
[
"import tensorflow as tf\ntf.enable_eager_execution()",
"_____no_output_____"
],
[
"positive = tf.read_file(\"lol.wav\")\npositive = tf.contrib.ffmpeg.decode_audio(positive, file_format='wav', samples_per_second=16000, channel_count=1)",
"_____no_output_____"
],
[
"positive.numpy()",
"_____no_output_____"
],
[
"import pathlib",
"_____no_output_____"
],
[
"pydub.AudioSegment.silent().get_array_of_samples()",
"_____no_output_____"
],
[
"glob.glob(\"../data/raw/**/*.wav\", recursive=True)",
"_____no_output_____"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a62651485e8c8f6c5d9fadaf9350c6b5ba082b8
| 173,590 |
ipynb
|
Jupyter Notebook
|
files/Day1_2.ipynb
|
caltechlibrary/2020-03-09-Python-Workshop
|
5cfd6960eca83515bfb79e86a93494e8328146fa
|
[
"CC-BY-4.0"
] | null | null | null |
files/Day1_2.ipynb
|
caltechlibrary/2020-03-09-Python-Workshop
|
5cfd6960eca83515bfb79e86a93494e8328146fa
|
[
"CC-BY-4.0"
] | null | null | null |
files/Day1_2.ipynb
|
caltechlibrary/2020-03-09-Python-Workshop
|
5cfd6960eca83515bfb79e86a93494e8328146fa
|
[
"CC-BY-4.0"
] | null | null | null | 140.218094 | 32,368 | 0.884268 |
[
[
[
"2+2",
"_____no_output_____"
],
[
"answer = 2+2",
"_____no_output_____"
],
[
"print(answer)",
"4\n"
],
[
"new_variable = 9",
"_____no_output_____"
],
[
"new_variable = 6\nprint(new_variable)",
"6\n"
]
],
[
[
"This ia markdown cell\n\n# This is a heading\n\nMy program is awesome",
"_____no_output_____"
]
],
[
[
"import numpy",
"_____no_output_____"
],
[
"data = numpy.loadtxt(fname='data/inflammation-01.csv',delimiter=',')",
"_____no_output_____"
],
[
"print(data)",
"[[0. 0. 1. ... 3. 0. 0.]\n [0. 1. 2. ... 1. 0. 1.]\n [0. 1. 1. ... 2. 1. 1.]\n ...\n [0. 1. 1. ... 1. 1. 1.]\n [0. 0. 0. ... 0. 2. 0.]\n [0. 0. 1. ... 1. 1. 0.]]\n"
],
[
"print(type(data[0,0]))",
"<class 'numpy.float64'>\n"
],
[
"print(data.dtype)",
"float64\n"
],
[
"print(data.shape)",
"(60, 40)\n"
],
[
"print('first value in data:', data[0,0])",
"first value in data: 0.0\n"
],
[
"print('middle value in data:',data[30,20])",
"middle value in data: 13.0\n"
],
[
"print(data[0:5,0:10])",
"[[0. 0. 1. 3. 1. 2. 4. 7. 8. 3.]\n [0. 1. 2. 1. 2. 1. 3. 2. 2. 6.]\n [0. 1. 1. 3. 3. 2. 6. 2. 5. 9.]\n [0. 0. 2. 0. 4. 2. 2. 1. 6. 7.]\n [0. 1. 1. 3. 3. 1. 3. 5. 2. 4.]]\n"
],
[
"print(data[4:8,0:10])",
"[[0. 1. 1. 3. 3. 1. 3. 5. 2. 4.]\n [0. 0. 1. 2. 2. 4. 2. 1. 6. 4.]\n [0. 0. 2. 2. 4. 2. 2. 5. 5. 8.]\n [0. 0. 1. 2. 3. 1. 2. 3. 5. 3.]]\n"
],
[
"print(data[:3, 36:])",
"[[2. 3. 0. 0.]\n [1. 1. 0. 1.]\n [2. 2. 1. 1.]]\n"
],
[
"print(numpy.mean(data))",
"6.14875\n"
],
[
"maxval,minval,stdval = numpy.max(data),numpy.min(data),numpy.std(data)",
"_____no_output_____"
],
[
"print('maximum inflammation: ',maxval)\nprint('minimum inflammation: ',minval)\nprint('standard deviation: ',stdval)",
"maximum inflammation: 20.0\nminimum inflammation: 0.0\nstandard deviation: 4.613833197118566\n"
],
[
"patient_0 = data[0,:]",
"_____no_output_____"
],
[
"print('Maximum inflammation for patient 0:',numpy.max(patient_0))",
"Maximum inflammation for patient 0: 18.0\n"
],
[
"print(numpy.mean(data,axis=1).shape)",
"(60,)\n"
],
[
"element = 'oxy gen'\nprint('first three characters:', element[0:3])",
"first three characters: oxy\n"
],
[
"print(element[:4])\nprint(element[4:])\nprint(element[:])",
"oxy \ngen\noxy gen\n"
],
[
"print(element[-1])\nprint(element[-2])",
"n\ne\n"
],
[
"print(element[1:-1])",
"xy ge\n"
],
[
"patient3_week1 = data[3,0:7]\nprint(patient3_week1)\ndiff_values = numpy.diff(patient3_week1)\nprint(diff_values)",
"[0. 0. 2. 0. 4. 2. 2.]\n[ 0. 2. -2. 4. -2. 0.]\n"
],
[
"diff_values = numpy.diff(data,axis=1)\ndiff_values.shape",
"_____no_output_____"
],
[
"import matplotlib.pyplot\nimage = matplotlib.pyplot.imshow(data)",
"_____no_output_____"
],
[
"ave_inflammation = numpy.min(data,axis=0)\nave_plot = matplotlib.pyplot.plot(ave_inflammation)",
"_____no_output_____"
],
[
"figure = matplotlib.pyplot.figure(figsize=(10.0,3.0))\n\naxes1 = figure.add_subplot(1,3,1)\naxes2 = figure.add_subplot(1,3,2)\naxes3 = figure.add_subplot(1,3,3)\n\naxes1.set_ylabel('average')\naxes2.set_ylabel('max')\naxes3.set_ylabel('min')\n\naxes1.plot(numpy.mean(data, axis=0))\naxes2.plot(numpy.max(data, axis=0))\naxes3.plot(numpy.min(data, axis=0))\n\nfigure.tight_layout()\n",
"_____no_output_____"
],
[
"'String'.startswith(\"Str\")",
"_____no_output_____"
],
[
"'String'.startswith(\"str\")",
"_____no_output_____"
],
[
"import glob\n\npath = 'data/'\nfilenames = sorted(glob.glob(path+'*'))\nlarge_files = [] #Starts with inflammation\nsmall_files = [] #Starts with small\nother_files = []\n\nfor filename in filenames:\n if filename.startswith(path+\"inflammation\"):\n large_files.append(filename)\n elif filename.startswith(path+'small'):\n small_files.append(filename)\n else:\n other_files.append(filename)\n \nprint(large_files)\nprint(small_files)\nprint(other_files)",
"['data/inflammation-01.csv', 'data/inflammation-02.csv', 'data/inflammation-03.csv', 'data/inflammation-04.csv', 'data/inflammation-05.csv', 'data/inflammation-06.csv', 'data/inflammation-07.csv', 'data/inflammation-08.csv', 'data/inflammation-09.csv', 'data/inflammation-10.csv', 'data/inflammation-11.csv', 'data/inflammation-12.csv']\n['data/small-01.csv', 'data/small-02.csv', 'data/small-03.csv']\n['data/plots.csv', 'data/species.csv', 'data/surveys.csv']\n"
],
[
"ave_inflammation = numpy.min(data,axis=0)\nave_plot = matplotlib.pyplot.plot(ave_inflammation)",
"_____no_output_____"
],
[
"ave_inflammation = numpy.min(data,axis=0)\nave_plot = matplotlib.pyplot.plot(ave_inflammation)",
"_____no_output_____"
],
[
"def fahr_to_celsius(temp):\n return ((temp - 32)*(5/9))",
"_____no_output_____"
],
[
"fahr_to_celsius(32)",
"_____no_output_____"
],
[
"print('freezing point of water:', fahr_to_celsius(32), 'C')\nprint('boiling point of water:',fahr_to_celsius(212), 'C')",
"freezing point of water: 0.0 C\nboiling point of water: 100.0 C\n"
],
[
"def celsius_to_kelvin(temp_c):\n return temp_c + 273.15\n\nprint('freezing point of water in Kelvin:',celsius_to_kelvin(0))",
"freezing point of water in Kelvin: 273.15\n"
],
[
"def fahr_to_kelvin(temp_f,message):\n temp_c = fahr_to_celsius(temp_f)\n print(temp_c)\n temp_k = celsius_to_kelvin(temp_c)\n print(message)\n return temp_c,temp_k\n\n\n\ntemp_c,temp_k = fahr_to_kelvin(212.0,\"Message text\")\nprint(temp_c,temp_k)",
"100.0\nMessage text\n100.0 373.15\n"
],
[
"def visualize(filename): #Take a filename and make plots\n \n data = numpy.loadtxt(fname=filename,delimiter = ',')\n \n figure = matplotlib.pyplot.figure(figsize=(10.0,3.0))\n\n axes1 = figure.add_subplot(1,3,1)\n axes2 = figure.add_subplot(1,3,2)\n axes3 = figure.add_subplot(1,3,3)\n\n axes1.set_ylabel('average')\n axes2.set_ylabel('max')\n axes3.set_ylabel('min')\n\n axes1.plot(numpy.mean(data, axis=0))\n axes2.plot(numpy.max(data, axis=0))\n axes3.plot(numpy.min(data, axis=0))\n\n figure.tight_layout()\n matplotlib.pyplot.show()\n \ndef detect_problems(filename): #Take a filename; Conditional example\n data=numpy.loadtxt(fname=filename,delimiter=',')\n if numpy.max(data,axis=0)[0] == 0 and numpy.max(data,axis=0)[20] == 20:\n print('Suspicious looking maxima')\n elif numpy.sum(numpy.min(data,axis=0)) ==0:\n print('Minima add up to zero')\n else:\n print('Seems OK!')",
"_____no_output_____"
],
[
"filenames = sorted(glob.glob('data/inflammation*.csv'))\n\nfor filename in filenames[:3]:\n print(filename)\n visualize(filename)\n detect_problems(filename)",
"data/inflammation-01.csv\n"
],
[
"def offset_mean(data,target_mean_value=0,message='Message text'):\n '''Returns a new array containing the origional data\n with its mean offset to match the desired value\n \n target_mean_value defaults to zero\n '''\n print(message)\n return(data-numpy.mean(data)) + target_mean_value",
"_____no_output_____"
],
[
"z = numpy.zeros((2,2))\nprint(offset_mean(z,3,'new_message'))\nprint(offset_mean(z,message='new text'))",
"new_message\n[[3. 3.]\n [3. 3.]]\nnew text\n[[0. 0.]\n [0. 0.]]\n"
],
[
"help(offset_mean)",
"Help on function offset_mean in module __main__:\n\noffset_mean(data, target_mean_value)\n Returns a new array containing the origional data\n with its mean offset to match the desired value\n\n"
],
[
"data = numpy.loadtxt(fname='data/inflammation-01.csv',delimiter=',')\nprint(offset_mean(data,0))",
"[[-6.14875 -6.14875 -5.14875 ... -3.14875 -6.14875 -6.14875]\n [-6.14875 -5.14875 -4.14875 ... -5.14875 -6.14875 -5.14875]\n [-6.14875 -5.14875 -5.14875 ... -4.14875 -5.14875 -5.14875]\n ...\n [-6.14875 -5.14875 -5.14875 ... -5.14875 -5.14875 -5.14875]\n [-6.14875 -6.14875 -6.14875 ... -6.14875 -4.14875 -6.14875]\n [-6.14875 -6.14875 -5.14875 ... -5.14875 -5.14875 -6.14875]]\n"
],
[
"print('origional min,mean,max',numpy.min(data),\n numpy.mean(data),numpy.max(data))\noffset_data= offset_mean(data,0)\nprint('offset min,mean,max',numpy.min(offset_data),\n numpy.mean(offset_data),numpy.max(offset_data))",
"origional min,mean,max 0.0 6.14875 20.0\noffset min,mean,max -6.14875 2.842170943040401e-16 13.85125\n"
],
[
"def outer(input_string):\n return input_string[0] + input_string[-1]\n\nprint(outer('hydrogen'))\n#hm",
"hn\n"
],
[
"numbers = [2,1.5,2,-7]\ntotal = 0\nfor num in numbers:\n assert num > 0, 'Data should only contain positive values'\n total += num\nprint('total is:', total)",
"_____no_output_____"
],
[
"def normalize_rectangle(rect):\n '''Normalize a rectangle so that it is at the origin\n and is one unit long in its long axis\n Input should be of the format (x0,y0,x1,y1)\n x0,y0 and x1,y1 define the lower left and upper right corners'''\n assert len(rect) == 4, 'Rectangles must contain 4 coordinates'\n x0,y0,x1,y1 = rect\n assert x0 < x1, 'Invalid X coordinates'\n assert y0 < y1, ' Invalid Y coordinates'\n \n dx = x1-x0\n dy = y1-y0\n if dx > dy:\n scaled = float(dx)/dy\n upper_x,upper_y = 1.0,scaled\n else:\n scaled = float(dx)/dy\n upper_x,upper_y = scaled, 1.0\n \n assert 0 < upper_x <= 1.0, 'Calculated upper X coordinate invalid'\n assert 0 < upper_y <= 1.0, ' Calculated upper Y coordinate invalid'\n \n return (0,0,upper_x,upper_y)",
"_____no_output_____"
],
[
"print(normalize_rectangle((0.0,1.0,2.0,)))",
"_____no_output_____"
],
[
"print(normalize_rectangle([4.0,2.0,1.0,5.0]))",
"_____no_output_____"
],
[
"print(normalize_rectangle((0.0,0.0,1.0,5.0)))",
"(0, 0, 0.2, 1.0)\n"
],
[
"print(normalize_rectangle((0.0,0.0,5.0,1.0)))",
"_____no_output_____"
],
[
"assert range_overlap([(0,1)]) == (0,1)\nassert range_overlap([(2,3),(2,4)]) == (2,3)\nassert range_overlap([(0,1),(0,2),(-1,1)]) == (0,1)\nassert range_overlap([(0,1),(5,6)]) == None\nassert range_overlap([(0,1),(1,2)]) == None",
"_____no_output_____"
],
[
"def range_overlap(ranges):\n '''Return common overlap among a set of [(left,right)] ranges'''\n initial = ranges[0]\n max_left = initial[0]\n min_right = initial[1]\n for (left,right) in ranges:\n max_left = max(max_left,left)\n min_right = min(min_right,right)\n if max_left<min_right:\n return(max_left,min_right)\n else:\n return None",
"_____no_output_____"
],
[
"range_overlap([(0,1),(5,6)])",
"_____no_output_____"
]
]
] |
[
"code",
"markdown",
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a626567918efed9d01ef40450244c547a651553
| 40,567 |
ipynb
|
Jupyter Notebook
|
doc/nb/pre-processing.ipynb
|
profxj/desi_sandbox
|
c03325f698f10be415215f7e3063c613ad5a79e4
|
[
"BSD-3-Clause"
] | null | null | null |
doc/nb/pre-processing.ipynb
|
profxj/desi_sandbox
|
c03325f698f10be415215f7e3063c613ad5a79e4
|
[
"BSD-3-Clause"
] | 2 |
2020-08-03T19:53:26.000Z
|
2020-08-03T21:39:13.000Z
|
doc/nb/pre-processing.ipynb
|
profxj/desi_sandbox
|
c03325f698f10be415215f7e3063c613ad5a79e4
|
[
"BSD-3-Clause"
] | null | null | null | 59.482405 | 1,370 | 0.637439 |
[
[
[
"import numpy as np\nimport matplotlib.pyplot as plt\nimport pandas as pd\n\nfrom numpy.linalg import inv\n\nfrom astropy.table import Table, Column, vstack, hstack, unique, SortedArray,SCEngine\nimport astropy.units as u\n\nfrom astropy.io import fits, ascii \n\nimport glob\nimport os\n\nimport numpy\nfrom scipy.signal import medfilt\nfrom scipy.interpolate import interp1d\nfrom numba import njit\nimport tqdm\nfrom pandas import DataFrame ",
"_____no_output_____"
],
[
"# read in high S/N table \n# loop of over the rows \n# Night and Date to make spectra file name: PETAL_LOC, NIGHT, TILEID\n# load hdul\n# cut r camera, \"r_wavelength\" \n# Grab wavelength \n# Grab corresponding spectrum \n# FIBER #",
"_____no_output_____"
],
[
"highSN = Table.read(\"/Volumes/GoogleDrive/My Drive/HighS_N.fits\") # reads in table from previous code\n\nPETAL = (highSN['PETAL_LOC'].data) # load each column as an array\nNIGHT = (highSN['NIGHT'].data) # load each column as an array\nTILEID = (highSN['TILEID'].data) # load each column as an array\n\ncombined = np.vstack((PETAL, TILEID,NIGHT)).T # combines each element of each array together \nprint(combined)",
"[[ 3 63075 20200305]\n [ 1 63075 20200305]\n [ 6 63075 20200305]\n ...\n [ 7 63075 20200219]\n [ 5 63159 20200314]\n [ 8 63159 20200314]]\n"
],
[
"tileid = [] # creates an empty array\nfor row in highSN:\n file = str(row['PETAL_LOC']) + '-' + str(row['TILEID']) + '-' + str(row['NIGHT']) # goes through each element of the array and grabs the wanted elements of each one. This then combines them in the right format \n file_tileid = str(row['TILEID']) + '/' + str(row['NIGHT']) + '/coadd-' + file # this grabs the necessary element of each array and combines them to make part of our path in the next cell\n tileid.append(file_tileid) # appends the file created above to them empty array\n# print(file)",
"_____no_output_____"
],
[
"file = ['/Volumes/GoogleDrive/My Drive/andes (1)/tiles/' + x +'.fits' for x in tileid] # this combines all of the elements grabbed above to make a filepath\n",
"_____no_output_____"
],
[
"# for x,y in zip(list1,list2):",
"_____no_output_____"
],
[
"for i in range(2843,3843):\n\n hdul = fits.open(file[i]) # opens the fit data that belongs to the i sub_file and gets the information from that file\n\n r_wave = (hdul['R_WAVELENGTH'].data) # Takes the chosen row of the hdul file\n r_flux = (hdul['R_FLUX'].data) # Takes the chosen row of the hdul file\n r_ivar = (hdul['R_IVAR'].data) # Takes the chosen row of the hdul file\n \n FIBER = (highSN['FIBER'].data) # Takes the chosen row of the hdul file\n\n fibermap = hdul['FIBERMAP'].data # Takes the chosen row of the hdul file\n fibers = fibermap['FIBER'] \n\n# print(FIBER[i]) # prints each element of FIBER \n index = (np.where(np.in1d(fibers, FIBER[i]))) # prints which index is where fibers and FIBER matches\n# print(fibers[np.where(np.in1d(fibers, FIBER[i]))]) # plugs in the index to make sure this is where the number matches \n index_ = list(index[0]) # converts the first element of the tuple created and converts it to a list.\n# print(index_[0]) # prints the first element of the list \n \n \n rflux = r_flux[index_[0],:] # plugs in the index found above and finds the matching spectrum \n rivar = r_ivar[index_[0],:] # plugs in the index found above and finds the matching spectrum \n rwave = r_wave\n \n np.savez('/Volumes/GoogleDrive/My Drive/rflux.npz', rflux = rflux, overwrite = True) # saves the multiple arrays to one file \n np.savez('/Volumes/GoogleDrive/My Drive/rivar.npz', rivar = rivar, overwrite = True)\n np.savez('/Volumes/GoogleDrive/My Drive/rwave.npz', rwave = rwave, overwrite = True)\n \n# plt.title('Spectrum', fontsize = 15) # places a title and sets font size\n# plt.xlabel('Wavelength', fontsize = 15) # places a label on the x axis and sets font size \n# plt.ylabel('$\\\\mathrm{flux\\\\,[10^{-17}\\\\, erg \\\\, cm^{-2} \\\\, s^{-1} \\\\, \\\\AA^{-1}] }$', fontsize = 15) # places a label on the y axis and sets font size\n\n# plt.plot(r_wave, rflux) # plots the lists we just created using a function from matplotlib.pyplot. This plots both the x and y lists.\n# plt.show() ",
"_____no_output_____"
],
[
"# Check the IVAR array for 0 or negative values\n# Set the flux = `np.nan` for any pixels that have that IVAR <=0",
"_____no_output_____"
],
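[
"# A minimal sketch of the check described above, assuming the rflux.npz / rivar.npz files\n# written by the previous loop; pixels with IVAR <= 0 carry no usable information,\n# so their flux is set to NaN before any smoothing or interpolation.\nflux_check = np.load('/Volumes/GoogleDrive/My Drive/rflux.npz')['rflux']\nivar_check = np.load('/Volumes/GoogleDrive/My Drive/rivar.npz')['rivar']\nflux_check[ivar_check <= 0] = np.nan  # mask unreliable pixels\nprint('masked pixels:', np.sum(ivar_check <= 0))",
"_____no_output_____"
],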
[
"from scipy.signal import medfilt\nfrom scipy.interpolate import interp1d\nfrom numba import njit\nimport tqdm\nimport matplotlib.pyplot as plt\n\n\nflux = np.load('/Volumes/GoogleDrive/My Drive/rflux.npz', allow_pickle=True)# Load the spectra (expecting a list of spectra, length of the list is the number of objects, \nwaves = np.load('/Volumes/GoogleDrive/My Drive/rwave.npz', allow_pickle=True)# Load the corresponding wavelength grid for each spectrum (also a list)\nivar = np.load('/Volumes/GoogleDrive/My Drive/rivar.npz', allow_pickle=True)\n\ns = 3800\ne = 7400\n# common_wave = numpy.exp(numpy.linspace(numpy.log(s), numpy.log(e), 4200)) # Define the wavelength grid you would like to work with\n\nflux = flux['rflux']\nwaves = waves['rwave']\nivar = ivar['rivar']\n\nbadpix = ivar <= 0\nflux[badpix] = np.nan\n\nprint(flux)\n\nnof_objects = len(flux)\n\n@njit\ndef remove_outliers_and_nans(flux, flux_): \n ###\n \n# Use nearby pixels to remove 3 sigma outlires and nans \n \n ##\n nof_features = flux.size\n d = 5\n for f in range(d,nof_features-d):\n val_flux = flux[f]\n leave_out = numpy.concatenate((flux[f-d:f],flux[f+1:f+d]))\n leave_out_mean = numpy.nanmean(leave_out)\n leave_out_std = numpy.nanstd(leave_out)\n\n if abs(val_flux - leave_out_mean) > 3*leave_out_std:\n flux_[f] = leave_out_mean\n\n d_ = d\n while not numpy.isfinite(flux_[f]):\n val_flux = flux[f]\n d_ = d_ + 1\n leave_out = numpy.concatenate((flux[f-d_:f],flux[f+1:f+d_]))\n leave_out_mean = numpy.nanmean(leave_out)\n flux_[f] = leave_out_mean\n\n return flux_\n\nspecs_same_grid = []\nfor wave, spec in zip(waves, flux):\n specs_same_grid += [numpy.interp(wave, flux)]\n \nspecs_same_grid = numpy.array(specs_same_grid)\nfor i in range(nof_objects):\n flux = flux.copy()\n flux_ = flux.copy()\n # remove outliers and nans\n specs_final[i] = remove_outliers_and_nans(flux, flux_)\n # 5 pixel median filter (to remove some of the noise)\n specs_final[i] = medfilt(specs_final[i], 5)\n\n# specs_same_grid = []\n# for wave, spec in zip(waves, specs):\n# specs_same_grid += [numpy.interp(common_wave, wave, spec)]\n# specs_same_grid = numpy.array(specs_same_grid)\n\n\n\n# specs_final = numpy.zeros(specs_same_grid.shape)\n# for i in range(nof_objects):\n# s = specs_same_grid[i].copy()\n# s_ = s.copy()\n# # remove outliers and nans\n# specs_final[i] = remove_outliers_and_nans(s, s_)\n# # 5 pixel median filter (to remove some of the noise)\n# specs_final[i] = medfilt(specs_final[i], 5)",
"[3231.3555 3915.4768 4149.006 4223.501 4091.3186 4027.7656 3704.3708\n 4500.7056 4260.331 3842.5964 4225.3174 4119.8506 3942.4368 4118.08\n 4418.948 3908.3147 4252.978 3900.7166 4178.6826 4210.8257 3896.883\n 4265.458 4104.7207 4148.7646 4077.026 4188.77 4113.0024 4166.0205\n 3924.6562 3920.2302 3898.8967 3893.431 4000.9026 3699.5115 3851.5542\n 3963.1167 3975.1147 4017.1743 3904.2192 4016.8894 4063.0276 3913.5576\n 3946.9807 4028.0637 4006.8662 3862.0903 3795.69 3999.597 4009.4072\n 3921.952 3924.6096 4120.185 3883.2444 3881.737 4111.4414 3888.4216\n 4009.4275 3741.055 3852.6938 3875.1938 3837.1343 3820.6184 3937.7256\n 3867.7546 4079.2808 4086.0327 3991.7134 3955.8452 3857.3455 3887.1226\n 3936.9827 3846.668 3908.7073 3903.5776 3845.258 3805.4407 3851.5452\n 3919.82 3954.954 3962.8103 3890.4797 3864.293 3865.4543 3874.0393\n 3903.508 3995.3325 4072.9885 3892.4666 3978.897 3876.5354 3838.0854\n 3844.4856 3918.4976 3990.2246 3904.735 3806.123 3921.9294 3852.3965\n 3776.2866 3811.204 3723.6616 3776.9983 3896.01 3868.4927 3906.5574\n 3872.535 3855.2366 3895.3787 3919.0486 3847.6394 3840.3032 3909.9346\n 3869.5576 3898.2961 3835.9536 3814.0208 3734.252 3891.7087 3852.0444\n 3807.2217 3891.1016 3724.3228 3682.386 3539.966 3725.7356 3713.097\n 3781.9941 3655.73 3775.0151 3699.7314 3823.1626 3774.7559 3814.7688\n 3865.3582 3787.9077 3802.4485 3731.5452 3826.134 3789.1963 3899.3293\n 3720.0974 3897.2876 3825.4333 3845.8555 3711.1714 3730.6406 3668.84\n 3765.5376 3804.653 3787.153 3883.2148 3715.8257 3774.0322 3789.9504\n 3770.4397 3801.1465 3819.8647 3880.511 3743.9436 3690.3657 3593.8462\n 3594.9045 3463.544 3376.575 3756.5227 4012.4253 3874.6438 3423.7505\n 3425.0215 3544.2869 3405.915 3564.5195 3968.4958 4058.383 3736.4175\n 3743.9634 3588.6733 3793.6707 3769.843 3851.6348 3809.3958 3765.7114\n 3711.4502 3825.1719 3765.5547 3848.2104 3761.2668 3772.2927 3763.887\n 3832.14 3734.1824 3790.9075 3723.7898 3634.0664 3666.291 3754.3398\n 3786.1335 3757.6501 3739.9954 3754.2444 3817.438 3779.6155 3766.7388\n 3793.5818 3787.741 3782.7524 3736.738 3756.944 3749.17 3718.6047\n 3787.7866 3666.099 3672.7703 3804.9302 3743.9717 3782.5457 3745.5469\n 3620.8323 3770.5078 3752.784 3699.2488 3825.6218 3859.661 3700.7449\n 3754.4338 3769.6077 3811.8809 3799.2578 3767.3342 3785.3022 3792.8254\n 3831.265 3821.286 3809.683 3746.175 3650.9312 3751.0564 3752.1396\n 3654.6597 3763.033 3834.23 3750.5808 3699.25 3670.1575 3779.6348\n 3622.5078 3816.9417 3713.4749 3686.335 3765.9407 3882.1455 3857.789\n 3913.8516 3733.0066 3843.606 3760.9072 3777.8813 3829.8638 3792.9272\n 3723.2898 3732.4133 3819.2021 3774.5925 3804.085 3836.0984 3725.9812\n 3707.5464 3793.8281 3752.3132 3756.3245 3660.3555 3653.4788 3626.9006\n 3734.8843 3654.764 3727.6145 3755.5635 3629.561 3641.4365 3684.999\n 3728.66 3668.4329 3698.8188 3669.4521 3693.3904 3645.7456 3651.0344\n 3757.6238 3807.0396 3777.6646 3758.3909 3721.4502 3703.1165 3678.1646\n 3709.1335 3689.29 3676.14 3672.332 3718.821 3683.1467 3717.7107\n 3689.2585 3757.4587 3663.1926 3682.0757 3650.385 3670.0193 3732.5046\n 3654.9524 3674.3518 3624.331 3681.9077 3661.9 3786.7234 3673.1782\n 3707.9377 3678.054 3574.8833 3633.9941 3749.9958 3680.841 3633.049\n 3612.0967 3647.153 3646.949 3557.136 3536.9263 3650.1113 3597.2651\n 3615.1548 3519.1658 3594.8354 3540.938 3656.0078 3616.06 3653.355\n 3702.4465 3682.2441 3636.5757 3616.868 3667.0474 3699.4187 3674.7996\n 3776.8486 3616.7502 3756.7024 3751.9377 3630.909 3694.562 3796.424\n 3721.322 3757.4695 3715.0845 3708.4734 3683.268 3786.361 
3741.764\n 3739.1191 3614.1997 3688.5938 3677.8718 3754.6372 3708.6064 3692.838\n 3687.152 3627.8826 3694.598 3641.6912 3687.5125 3663.4734 3634.5408\n 3698.6091 3735.5293 3666.8342 3632.8152 3725.8237 3628.0283 3672.175\n 3639.535 3637.3022 3701.0876 3629.569 3724.862 3602.0205 3596.9944\n 3609.0444 3617.1655 3635.107 3628.1533 3619.1145 3651.6829 3702.7258\n 3650.9224 3662.896 3747.098 3552.9292 3616.0928 3639.5728 3630.9539\n 3668.7593 3648.8323 3683.0085 3583.869 3601.903 3539.3875 3657.172\n 3615.3738 3663.6165 3663.9307 3632.9194 3642.35 3620.009 3485.1116\n 3616.0662 3654.8647 3549.754 3629.966 3665.4414 3590.0151 3557.8372\n 3627.1482 3728.5542 3649.664 3843.6743 3667.658 3571.7058 3602.945\n 3477.9658 3489.5032 3430.36 3525.4104 3628.9702 3622.4014 3484.594\n 3540.501 3586.341 3633.668 3579.7466 3619.2449 3627.5957 3638.361\n 3676.7197 3585.7742 3640.2048 3566.6775 3610.4111 3651.6443 3635.1213\n 3634.4585 3568.9878 3549.0972 3445.5522 3562.423 3493.9272 3512.3027\n 3501.382 3590.5037 3531.3374 3524.1829 3534.2175 3611.9622 3525.783\n 3640.831 3618.9075 3547.186 3497.5537 3509.1025 3538.452 3560.741\n 3522.217 3446.6707 3400.833 3372.6235 3518.3826 3576.1787 3481.1748\n 3380.2097 3389.3389 3439.3018 3459.6309 3603.1045 3580.4902 3540.4194\n 3506.6147 3516.3403 3560.6304 3526.863 3692.2341 3609.0344 3620.389\n 3584.9695 3535.7595 3528.9812 3467.3665 3510.101 3427.4868 3547.0535\n 3473.04 3497.5718 3569.8142 3423.2913 3434.911 3391.6953 3308.0808\n 3234.3281 3341.0803 3362.691 3339.928 3395.6382 3383.8499 3406.8918\n 3475.2246 3428.7515 3392.302 3435.765 3438.7656 3498.8467 3442.1406\n 3461.7922 3543.6548 3540.3054 3592.3914 3586.2234 3565.4563 3533.4224\n 3612.8687 3557.2778 3519.7332 3633.2095 3638.7026 3561.9365 3509.5225\n 3603.4438 3571.0098 3618.7593 3563.5264 3541.899 3545.918 3560.742\n 3498.1475 3526.6912 3482.078 3578.088 3490.8967 3592.7666 3459.5981\n 3637.5798 3599.4915 3595.7031 3540.0813 3549.6772 3548.4531 3542.825\n 3552.5657 3501.1304 3619.6301 3637.6438 3670.3928 3564.3257 3638.4482\n 3573.957 3670.8113 3615.4377 3647.6023 3593.3552 3628.7493 3513.2913\n 3530.6482 3519.8079 3522.8591 3467.8442 3486.2341 3486.0076 3489.8767\n 3458.1702 3446.679 3522.5002 3463.037 3550.1416 3562.2498 3454.4805\n 3488.5166 3511.2444 3527.7507 3517.8118 3536.0876 3523.1357 3484.5215\n 3529.8853 3426.0713 3370.5527 3531.4492 3489.2346 3436.1064 3475.2476\n 3388.206 3471.0764 3498.386 3409.464 3424.9934 3481.709 3481.871\n 3425.954 3420.9138 3457.1018 3410.5205 3378.098 3302.8652 3351.618\n 3411.782 3454.3545 3444.509 3470.1863 3466.4324 3520.1223 3402.857\n 3397.4077 3395.5664 3351.6077 3298.2646 3377.762 3389.808 3401.7656\n 3453.4778 3365.6877 3413.0764 3440.6367 3465.1663 3500.5994 3471.4717\n 3510.375 3478.2578 3507.9753 3537.3738 3556.2458 3594.4978 3482.974\n 3425.7556 3502.743 3499.6587 3512.7515 3495.3179 3545.6138 3475.3826\n 3529.903 3503.3208 3512.6218 3563.5198 3537.587 3467.843 3535.769\n 3522.097 3512.0342 3490.336 3536.6208 3526.4626 3621.5515 3524.6704\n 3613.9849 3485.078 3586.3362 3446.4702 3498.2395 3474.8503 3532.236\n 3464.57 3464.916 3532.3723 3530.9387 3526.0168 3515.0576 3543.165\n 3477.775 3472.6155 3445.9802 3429.206 3417.0273 3407.997 3267.3047\n 3400.417 3472.0881 3534.0063 3390.097 3406.6 3389.6458 3413.1548\n 3475.3447 3455.2087 3465.568 3440.9805 3453.2896 3449.6414 3482.2185\n 3386.807 3391.64 3307.0564 3351.6235 3408.7847 3336.4707 3270.7988\n 3290.0422 3351.0007 3460.0105 3341.1482 3333.6885 3422.8608 3459.5132\n 3531.5364 3458.0315 3434.0146 3421.7583 
3427.0408 3382.6912 3469.2122\n 3474.7358 3508.742 3472.1855 3443.8167 3414.1304 3367.7153 3344.2886\n 3326.7192 3398.206 3335.7878 3407.238 3385.4 3419.5857 3358.4668\n 3366.9763 3387.9365 3339.456 3311.8955 3324.5012 3400.7327 3415.51\n 3451.2976 3405.7224 3485.388 3435.0999 3482.1091 3414.4607 3403.1472\n 3446.4192 3437.5332 3442.8687 3370.611 3423.5034 3388.3552 3380.6953\n 3408.1873 3303.1667 3392.623 3376.1377 3254.7324 3283.518 3270.85\n 3377.9185 3297.8289 3401.5876 3349.0125 3379.997 3467.0288 3404.498\n 3451.789 3410.4253 3396.7312 3468.8857 3438.5159 3463.5327 3403.0571\n 3389.0369 3407.661 3441.4358 3438.7136 3363.9705 3450.5093 3407.8357\n 3370.1143 3432.9524 3432.3271 3439.7905 3447.884 3432.9604 3474.1523\n 3414.196 3436.9258 3389.0103 3377.597 3459.146 3385.3289 3433.5278\n 3391.4631 3288.6113 3359.7808 3369.4863 3461.0745 3369.851 3316.7803\n 3360.9758 3297.7236 3399.062 3355.2256 3341.7212 3424.3633 3401.505\n 3346.507 3296.8171 3353.3125 3330.2273 3401.3174 3424.7432 3325.2476\n 3325.9534 3368.4243 3314.9465 3327.1882 3335.393 3321.1338 3321.9785\n 3330.7114 3405.0613 3292.9846 3373.1038 3388.619 3414.907 3316.7651\n 3242.2283 3285.205 3342.6296 3333.091 3404.3086 3343.8801 3398.3613\n 3345.5698 3394.3196 3446.0073 3347.024 3352.089 3389.9666 3347.164\n 3338.907 3378.5923 3360.6064 3305.964 3442.3125 3311.5847 3361.063\n 3324.9878 3350.7173 3333.2905 3279.183 3299.0684 3404.456 3403.5706\n 3278.6248 3356.1763 3328.691 3387.9197 3343.734 3307.1768 3402.735\n 3338.527 3339.709 3332.0444 3336.961 3416.8096 3301.7812 3313.5889\n 3380.7527 3229.947 3382.1414 3312.222 3413.2021 3377.302 3357.573\n 3304.1414 3259.849 3324.138 3273.306 3227.2112 3225.4175 3396.7375\n 3401.6035 3275.2593 3269.3818 3301.4133 3339.978 3339.0276 3341.8337\n 3375.9092 3309.7783 3277.1812 3352.794 3298.4666 3393.688 3312.363\n 3354.982 3309.4924 3340.8213 3325.2043 3284.1707 3340.8293 3298.857\n 3310.8489 3312.0234 3377.0862 3368.5737 3332.058 3316.8328 3367.0366\n 3362.615 3405.4695 3384.4697 3307.1704 3378.8865 3280.6782 3244.38\n 3168.4607 3051.5896 3118.3088 3061.5205 3154.3413 3188.655 3223.7893\n 3227.5696 3145.5352 3208.8545 3252.9346 3226.4426 3313.4368 3303.3274\n 3248.9338 3283.9382 3257.796 3347.1384 3336.0757 3304.2012 3366.5222\n 3323.5 3338.9626 3372.8433 3415.934 3386.4443 3403.3513 3308.694\n 3295.8152 3319.3135 3291.2515 3368.75 3320.9507 3332.458 3410.8574\n 3401.3838 3315.3943 3290.952 3357.8643 3348.7886 3290.5889 3313.5142\n 3295.665 3303.6182 3344.1904 3384.973 3358.2095 3305.8655 3292.6174\n 3271.4092 3255.6382 3286.7476 3298.151 3306.4937 3368.882 3316.0684\n 3318.777 3323.3853 3223.4932 3260.3542 3251.987 3301.287 3194.3655\n 3234.0386 3249.2866 3227.64 3236.106 3194.0627 3163.5974 3339.4993\n 3230.849 3244.4563 3222.9424 3184.3032 3153.7336 3144.4385 3062.1997\n 3085.1367 3001.7964 2987.0706 3038.392 2955.467 2947.713 2766.6958\n 2639.781 2498.5234 2372.1748 2298.1025 2389.8945 2983.0366 3228.1707\n 2928.231 2886.7231 2775.3958 2898.038 2944.6863 3061.221 3055.4778\n 3086.4539 3108.276 3100.0647 3077.507 3195.3516 3242.6838 3209.9778\n 3190.3042 3250.1235 3201.7134 3220.19 3172.573 3176.1414 3224.9587\n 3212.2854 3231.5488 3207.347 3246.7275 3211.9531 3275.3987 3231.1587\n 3263.38 3242.7717 3264.8267 3206.8284 3174.561 3197.929 3224.2756\n 3242.8674 3235.6033 3197.9348 3274.9023 3184.9941 3255.871 3195.3867\n 3245.4666 3269.6992 3259.886 3239.2742 3239.547 3256.9866 3267.2036\n 3229.852 3341.0044 3224.8293 3206.8384 3252.5476 3245.5115 3195.735\n 3253.2402 3182.7402 3281.5664 
3198.8416 3254.4565 3293.992 3270.3975\n 3261.4578 3242.7705 3330.464 3302.4912 3258.9072 3247.0095 3270.2595\n 3228.2534 3276.3176 3361.566 3219.1824 3277.5493 3258.3245 3301.5527\n 3323.1875 3264.1816 3226.7295 3228.774 3220.0532 3277.316 3223.8638\n 3157.0625 3272.0295 3213.177 3215.973 3296.4229 3157.1313 3192.7627\n 3217.982 3259.3164 3192.7256 3188.0522 3178.884 3243.3123 3231.5437\n 3186.0073 3203.3718 3266.456 3184.971 3281.5757 3211.2295 3200.593\n 3209.3113 3376.0818 3223.0786 3280.5054 3245.2214 3257.0527 3205.914\n 3306.8452 3248.3088 3253.282 3179.0903 3252.1335 3307.3257 3250.5063\n 3190.8416 3189.8782 3165.7114 3180.8765 3170.445 3201.0332 3178.6133\n 3217.1545 3187.0632 3191.853 3153.8386 3229.4182 3256.4717 3226.2556\n 3248.5586 3220.585 3200.521 3227.9746 3144.7952 3200.089 3182.9792\n 3107.0151 3206.7354 3224.4316 3191.381 3187.4714 3118.6565 3182.8315\n 3239.001 3250.7234 3224.0574 3211.5803 3214.4482 3238.6711 3198.2595\n 3220.5034 3252.7544 3235.328 3219.3901 3192.2354 3190.293 3244.222\n 3182.224 3162.773 3147.8372 3190.7168 3160.6633 3154.7117 3209.0742\n 3186.88 3177.663 3187.1057 3144.3237 3170.429 3147.074 3207.0781\n 3224.4827 3190.3245 3180.2769 3153.2273 3217.0508 3175.8628 3172.252\n 3187.7034 3206.802 3180.6377 3102.8208 3101.2688 3099.6382 3173.2207\n 3119.3762 3135.2212 3105.2336 3139.7017 3147.9866 3146.828 3143.0051\n 3082.9666 3180.58 3179.9517 3207.2798 3197.0361 3172.9792 3146.5586\n 3215.939 3100.8525 3196.0598 3160.9133 3157.6719 3151.6917 3133.851\n 3162.0388 3177.287 3198.7283 3172.8552 3077.154 3130.0977 3170.9763\n 3199.627 3224.526 3148.9402 3133.177 3225.4622 3143.794 3163.958\n 3186.894 3115.261 3201.0752 3137.6406 3150.148 3158.6384 3165.0854\n 3125.0117 3150.5764 3172.0808 3152.5386 3130.5815 3167.6724 3155.9714\n 3085.364 3165.222 3157.9272 3144.8904 3182.0813 3203.6682 3131.4246\n 3187.419 3151.8518 3174.8345 3185.4526 3112.4587 3123.6394 3131.952\n 3154.344 3068.8367 3155.758 3108.394 3108.9749 3125.066 3068.3916\n 3189.1 3161.5256 3136.8347 3127.574 3145.606 3198.663 3189.2634\n 3151.2527 3180.4443 3118.0088 3066.9197 3160.2402 3117.0747 3142.9575\n 3119.3118 3121.1055 3111.7434 3101.8289 3161.763 3126.676 3102.689\n 3140.6511 3123.1807 3134.9727 3094.686 3128.7542 3124.034 3181.2456\n 3152.2131 3136.4377 3111.3994 3081.1667 3193.3806 3162.359 3094.0852\n 3127.5762 3175.2722 3158.3315 3149.1396 3120.067 3145.5574 3080.9238\n 3134.6633 3132.227 3082.768 3111.6643 3039.6965 3081.2036 3122.45\n 3072.7852 3132.0005 3118.618 3078.4062 3091.7327 3152.2952 3173.3408\n 3044.1218 3117.949 3151.6697 3130.7324 3162.2456 3097.5422 3074.832\n 3135.7075 3037.6902 3106.296 3081.112 3078.0442 3083.8022 3036.789\n 3056.013 3091.0322 3196.4756 3089.1152 3088.1885 3151.6746 3136.681\n 3141.0273 3049.7786 3142.2869 3070.8354 3060.5808 3086.9824 3044.0813\n 3085.526 3057.3513 3029.1968 3040.712 3048.4382 3055.5854 3036.5928\n 3114.6138 3123.2446 3057.6462 3057.055 3120.891 3093.86 3035.433\n 3086.639 3107.0664 3046.4211 3064.7417 3052.0142 3086.2095 3045.1304\n 3056.6963 3042.01 3054.261 2958.2712 3046.5222 3070.2974 3052.7185\n 3013.9385 3077.985 3100.2112 3043.1846 3056.5374 3085.5247 3140.8354\n 3141.8792 3159.551 3145.7815 3106.796 3078.4468 3104.031 3116.3403\n 3159.1702 3100.661 3136.0728 3064.2712 3118.47 3131.9727 3022.1326\n 3058.3838 3063.414 3110.9646 3038.4966 3026.9045 3039.6226 3064.1968\n 3068.1936 3022.3938 3083.7898 3093.024 3125.4165 3035.5112 3017.6946\n 3159.608 3084.1853 3130.516 3119.8806 3104.9219 3111.3208 3158.421\n 3109.6174 
3123.3286 3057.5938 3098.974 3076.7053 3100.7766 3075.407\n 3082.5837 3081.1672 2992.7722 3023.2688 3145.5754 3020.5793 3084.2695\n 3065.6353 3028.2368 3059.1946 3020.3381 3063.2231 3044.2954 3058.125\n 2974.006 3008.3645 3045.5232 2969.9236 3028.9954 2973.2603 3036.7966\n 2979.212 3071.9504 3053.5 3063.3723 3057.1821 3020.498 3024.81\n 2931.6597 3011.9866 3063.3623 3054.075 3056.281 3046.35 3024.7568\n 2937.5088 3074.869 3078.024 3004.8545 3006.0872 3063.0674 3061.661\n 3018.8394 3018.5232 3026.4355 2975.2847 3040.6584 3017.9932 3064.3103\n 3081.4426 3065.8174 3039.8672 3038.9734 2989.076 3023.8115 3026.8042\n 3039.2312 3031.6406 3001.5588 3001.1948 3020.6448 2979.1719 2971.257\n 3005.086 3034.3633 3061.6936 2986.7285 3024.091 2984.8804 3069.076\n 2905.3616 3033.7178 3023.6082 3050.339 3024.604 3053.0127 3069.2305\n 3013.9233 2973.5813 3025.661 2972.4973 2980.6958 3016.1187 2992.622\n 3057.9001 3030.5486 2964.8623 3001.2002 3044.3752 3056.7456 2989.968\n 3037.3762 2967.449 2924.3308 3012.6587 2901.5837 2962.5647 2975.5432\n 2950.5815 2992.8777 2932.9614 2966.4546 3006.7417 3002.3945 3028.0908\n 2985.495 3013.0618 3005.2732 2993.4553 2994.3328 2953.9077 3004.6665\n 3016.2856 2924.903 3016.9856 2988.467 2973.1892 2973.3354 3041.0112\n 2979.3418 3007.5703 2998.7036 2965.5042 2935.1829 3012.2014 2929.1611\n 2942.4697 2903.8057 2879.0305 2935.8364 2926.6233 2981.3767 2925.9775\n 2949.5366 2947.1038 3000.2808 2934.5417 2956.8704 2910.4297 2954.0168\n 2925.2363 2931.7075 2926.6104 2910.1277 2857.5403 2958.8567 2887.5369\n 2914.5974 2889.4407 2925.8726 2882.6929 2939.6497 2920.054 2940.9448\n 2948.5437 2963.0715 2966.3115 2903.6003 2954.4016 2886.6597 2918.6262\n 2968.0388 2938.1052 2987.038 2873.8806 2902.2764 2951.1768 2886.075\n 2970.9683 2932.673 2951.2644 2924.6228 2902.5579 2905.704 2915.2625\n 2941.0867 2935.2883 2949.182 2954.9644 2976.795 2995.2056 2929.3792\n 2922.9182 2941.4631 2937.9866 2989.8662 2939.5989 2962.7825 2938.5132\n 2917.7485 2903.3408 2960.8945 2888.7104 2875.5571 2990.0273 2913.8552\n 2952.601 2927.189 2939.6902 2953.496 2970.0762 2901.7866 2925.3416\n 2935.7737 2922.7866 2908.7097 2980.2617 2989.902 2895.5776 2934.51\n 2926.183 2937.1853 2921.654 2936.6814 2891.5417 2974.4373 2952.1965\n 2886.2578 2908.2776 2932.4219 2980.4211 2937.7075 2985.3599 2931.696\n 2923.9158 2901.1602 2890.4119 2896.4675 2902.572 2874.2217 2834.0173\n 2914.2205 2860.0227 2884.3125 2919.4163 2881.757 2883.745 2913.1443\n 2958.1968 2930.1758 2934.165 2921.304 2880.1338 2981.131 2909.5342\n 2902.992 2898.538 2898.1387 2887.8657 2945.5107 2968.2068 2917.23\n 2937.3499 2910.968 2930.871 2917.3896 2859.0237 2987.3274 2934.681\n 2871.6267 2879.6863 2985.0286 2902.268 2874.194 2804.9607 2890.202\n 2914.7285 2855.2285 2957.4937 2868.7583 2888.1384 2893.8625 2858.8157\n 2890.7395 2910.8176 2873.1118 2801.842 2859.4756 2930.6965 2846.85\n 2847.4019 2895.6685 2895.2253 2891.0737 2896.5125 2834.1914 2846.173\n 2851.4004 2884.485 2887.249 2840.242 2838.6055 2848.9075 2795.0962\n 2885.5742 2942.2402 2920.9956 2861.1453 2863.704 2840.4026 2884.8328\n 2930.3904 2883.8997 2859.315 2879.8962 2858.0835 2820.1455 2768.2542\n 2812.2751 2769.4272 2831.51 2878.749 2887.5664 2903.894 2809.0305\n 2872.8855 2880.5593 2904.7869 2934.642 2866.5522 2888.5151 2879.1858\n 2826.5322 2828.822 2975.3938 2799.5088 2855.6035 2762.8345 2797.0588\n 2776.794 2770.9802 2808.7458 2879.3052 2829.2341 2822.2068 2849.9573\n 2900.5093 2874.9355 2906.7534 2879.2913 2848.3616 2849.7349 2811.9026\n 2892.578 2842.4712 2868.0752 2881.9468 
2859.3284 2809.3215 2849.9224\n 2802.5618 2870.442 2816.6094 2831.403 2678.2004 2799.286 2752.1956\n 2796.3093 2861.9617 2862.2427 2825.1536 2787.711 2796.6724 2745.0007\n 2831.8794 2770.3901 2779.504 2811.653 2783.3997 2819.1252 2797.2327\n 2841.3457 2823.9434 2875.4475 2854.9932 2780.3613 2838.759 2886.7097\n 2835.7688 2871.7158 2761.194 2789.0278 2772.0938 2821.2456 2771.974\n 2862.472 2862.2373 2786.1934 2799.4119 2801.0874 2812.7358 2830.1025\n 2847.037 2840.2869 2833.1055 2821.1157 2780.2869 2818.7786 2788.1807\n 2780.4766 2848.0676 2695.8296 2763.194 2824.2307 2858.3914 2814.3235\n 2876.2075 2823.112 2822.9126 2776.3416 2824.954 2847.8816 2814.0237\n 2889.6277 2878.8918 2839.4988 2853.6553 2765.5718 2805.883 2851.9814\n 2820.812 2798.3027 2736.3804 2768.9128 2844.9395 2777.3052 2779.7456\n 2766.8022 2774.4495 2846.676 2870.8271 2757.4907 2807.1204 2787.343\n 2815.997 2788.0063 2722.6855 2842.7668 2768.802 2790.6426 2844.8848\n 2797.2478 2808.7634 2783.0557 2815.2576 2787.066 2843.454 2831.3772\n 2772.1711 2835.2822 2807.8203 2760.9326 2822.4482 2795.2378 2831.8457\n 2770.6223 2709.5278 2797.4607 2793.2158 2790.0876 2785.693 2844.7095\n 2835.7979 2818.6143 2759.2114 2786.6118 2829.63 2743.0098 2813.5603\n 2807.7944 2688.1035 2790.016 2779.3447 2792.3066 2745.488 2770.3247\n 2703.0166 2729.6245 2774.9927 2831.5996 2744.225 2697.8105 2699.2075\n 2696.6636 2717.2783 2674.9727 2657.0498 2682.322 2742.7095 2719.033\n 2667.6633 2792.0808 2823.033 2710.4233 2725.0344 2755.075 2731.1025\n 2717.526 2779.1438 2807.526 2768.1033 2795.5015 2756.4233 2788.0059\n 2786.5537 2705.4175 2779.0952 2796.165 2732.1187 2706.775 2703.6025\n 2784.0476 2742.7666 2706.2693 2736.97 2808.4797 2757.2715 2763.5586\n 2814.2288 2738.6262 2641.7227 2810.5935 2790.207 2743.1006 2720.866\n 2813.1028 2730.318 2757.3079 2719.8352 2735.6326 2762.606 2663.4114\n 2738.7363 2716.0208 2752.417 2677.8096 2694.7202 2668.081 2751.526\n 2773.8208 2765.3074 2765.9062 2736.5986 2795.0132 2726.0078 2835.912\n 2820.2354 2769.7969 2749.604 2756.9805 2771.6152 2684.388 2806.6235\n 2756.422 2746.1401 2807.198 2739.5532 2763.2983 2707.31 2720.3801\n 2783.9895 2801.6443 2765.014 2759.8665 2760.2764 2754.7986 2727.8542\n 2697.9368 2685.231 2732.907 2838.413 2648.146 2714.4912 2667.4075\n 2724.174 2772.0464 2724.7764 2761.417 2677.1445 2765.0767 2743.4346\n 2667.6797 2759.1519 2755.8347 2744.5059 2692.053 2738.0188 2705.342\n 2724.4067 2724.3132 2698.0527 2773.065 2702.8054 2701.5923 2768.4849\n 2757.3457 2731.8435 2762.7239 2748.0176 2758.1592 2739.7139 2684.2139\n 2703.4016 2697.6663 2695.8223 2667.9055 2645.9539 2643.9636 2623.1038\n 2670.2183 2641.8538 2643.1714 2653.4368 2674.288 2659.1716 2663.039\n 2706.9585 2697.6606 2706.905 2698.0483 2725.7434 2706.124 2679.9946\n 2702.2363 2716.0137 2733.7874 2705.711 2683.2036 2652.465 2645.758\n 2666.1094 2690.0427 2672.2834 2712.2266 2613.2175 2636.5735 2633.8425\n 2668.6345 2637.9019 2647.3987 2649.5325 2628.4685 2626.9302 2636.1248\n 2679.4634 2639.0625 2689.0806 2702.0396 2614.5242 2620.804 2677.8906\n 2674.96 2655.8772 2653.9827 2685.435 2660.934 2682.4756 2649.313\n 2642.012 2685.4229 2714.8687 2681.2544 2729.8564 2674.316 2669.1577\n 2730.7231 2723.3955 2699.4688 2782.7207 2692.0342 2709.495 2722.9146\n 2666.3337 2685.5496 2685.4236 2633.6406 2753.3782 2629.0515 2663.8677\n 2615.0977 2614.8755 2622.6777 2709.203 2652.5273 2683.8616 2639.52\n 2670.3088 2682.4258 2661.3464 2633.346 2726.576 2689.778 2712.4697\n 2737.9185 2688.9329 2658.4482 2702.3923 2704.116 2657.3923 2667.8154\n 
2663.571 2697.121 2701.507 2678.6245 2695.8708 2653.0322 2675.8542\n 2641.3123 2626.8013 2691.5532 2667.254 2655.126 2663.471 2677.5974\n 2704.5923 2695.5684 2698.4236 2591.2153 2646.8152 2645.8845 2694.8237\n 2688.1348 2644.9155 2638.8472 2624.4348 2683.668 2646.069 2668.221\n 2612.0374 2676.5984 2685.4197 2652.3848 2677.943 2695.095 2699.568\n 2688.8408 2619.1196 2653.3804 2631.1865 2623.6997 2666.8655 2606.1846\n 2642.0369 2610.1858 2642.254 2663.4646 2671.3093 2498.1538 2604.6594\n 2662.5574 2683.5425 2704.5461 2666.0222 2603.24 2648.081 2657.824\n 2614.9111 2645.52 2647.9539 2636.8416 2613.8545 2623.735 2612.7517\n 2616.0232 2710.9087 2622.39 2609.6895 2588.4727 2691.56 2669.379\n 2709.581 2697.8975 2632.3596 2711.5435 2635.7024 2700.1616 2688.\n 2608.5767 2640.4768 2560.597 2565.8796 2653.3362 2617.7134 2644.5107\n 2593.8103 2573.8425 2571.143 2616.895 2640.5847 2724.1733 2660.871\n 2675.8271 2676.2627 2624.633 2669.09 2666.9707 2674.7593 2610.1646\n 2629.855 2591.986 2629.6948 2640.9155 2709.801 2598.741 2626.771\n 2707.2405 2730.9436 2661.709 2610.499 2678.0608 2681.8115 2563.7432\n 2576.2964 2654.9626 2712.162 2596.9922 2655.743 2634.3787 2605.4504\n 2648.015 2633.4985 2662.6401 2614.4802 2689.6814 2574.8257 2606.2146\n 2688.4321 2646.0508 2624.418 2668.5083 2567.5103 2587.397 2623.5234\n 2560.712 2659.2195 2531.9548 2737.9868 2633.7944 2537.883 2585.8904\n 2635.6162 2619.5952 2656.3105 2660.7073 2662.4253 2639.1758 2669.186\n 2629.0247 2640.0684 2622.6172 2601.2822 2688.9985 2673.8992 2613.5918\n 2556.1975 2597.9714 2659.55 2531.8608 2639.0352 2605.6125 2608.0671\n 2576.8142 2645.0967 2607.833 2736.0745 2737.948 2734.803 2605.7124\n 2585.251 2625.4175 2784.8584 2628.831 2500.7688 2500.959 2615.4155\n 2536.3665 2536.6018 2890.919 2803.6812 2799.0051 2750.372 2829.6008\n 2597.354 2842.148 2602.6565 2682.8486 2482.7092 2475.879 2785.1677\n 2548.516 2580.6663 2756.6836 2649.6724 2657.067 2798.486 2510.8672\n 2555.3286 2061.5469]\n"
],
[
"plt.figure(figsize = (15,7))\nidx = numpy.random.choice(specs.shape[0])\nplt.rcParams['figure.figsize'] = 10, 4\nplt.figure()\nplt.title('Original')\nplt.step(waves[idx], specs[idx], \"k\")\nplt.xlabel(\"observed wavelength\")\nplt.ylabel(\"$\\\\mathrm{flux\\\\,[10^{-17}\\\\, erg \\\\, cm^{-2} \\\\, s^{-1} \\\\, \\\\AA^{-1}] }$\")\nplt.tight_layout()\n\nplt.figure()\nplt.title('Noise removed, common grid')\nplt.step(common_wave, specs_final[idx], \"k\")\nplt.xlabel(\"observed wavelength\")\nplt.ylabel(\"$\\\\mathrm{flux\\\\,[10^{-17}\\\\, erg \\\\, cm^{-2} \\\\, s^{-1} \\\\, \\\\AA^{-1}] }$\")\nplt.tight_layout()",
"_____no_output_____"
],
[
"import umap\nfit = umap.UMAP()",
"_____no_output_____"
],
[
"em = fit.fit_transform(specs_final)\nx = em[:,0]\ny = em[:,1]",
"_____no_output_____"
],
[
"plt.figure(figsize = (8,7))\nplt.scatter(x, y)\nplt.show()",
"_____no_output_____"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a626ba47a0fb4ceb24772ef056936177d9df649
| 22,313 |
ipynb
|
Jupyter Notebook
|
examples/prepare/advanced_data_prep_geo.ipynb
|
zestai/zrp
|
14211cc90857e21a522637c31d42b0b663fcd368
|
[
"Apache-2.0"
] | 14 |
2022-02-09T20:58:31.000Z
|
2022-03-29T16:32:01.000Z
|
examples/prepare/advanced_data_prep_geo.ipynb
|
zestai/zrp
|
14211cc90857e21a522637c31d42b0b663fcd368
|
[
"Apache-2.0"
] | 3 |
2022-02-04T17:25:38.000Z
|
2022-03-15T01:13:32.000Z
|
examples/prepare/advanced_data_prep_geo.ipynb
|
zestai/zrp
|
14211cc90857e21a522637c31d42b0b663fcd368
|
[
"Apache-2.0"
] | null | null | null | 36.04685 | 898 | 0.433559 |
[
[
[
"# How to Build a Geo Lookup Table\nThe purpose of this notebook is to illustrate how to build a Geo-Lookup table, to use for geocoding addresses. In the ZRP pipeline, data is inputted as a dataframe with the following column: first name, middle name, last name, house number, street address (street name), city, state, zip code, and zest key. The 'zest key' must be specified to establish correspondence between inputs and outputs; it's effectively used as an index for the data table. The address data is mapped to a geocoded location (block group, census tract, or zipcode) using the lookup tables generated via the processes demonstrated in this example. This geocoded address will then be cross referenced with the ACS tables to determine the ACS features that will be a part of the feature vector ultimately trained on. In this example Alabama county level Census Tigerline shapefiles will be used generate a lookup table.",
"_____no_output_____"
]
],
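[
[
"#### Example input data (illustrative)\nA minimal sketch of the kind of input dataframe described above, built with plain pandas. The exact column spellings and the single made-up address row are illustrative assumptions rather than values taken from the ZRP package, so adjust them to match your data.",
"_____no_output_____"
]
],
[
[
"import pandas as pd\n\n# hypothetical single-row input; the column names below are assumed for illustration\ninput_df = pd.DataFrame([{\n    'first_name': 'Jane', 'middle_name': 'Q', 'last_name': 'Doe',\n    'house_number': '123', 'street_address': 'Main St',\n    'city': 'Montgomery', 'state': 'AL', 'zip_code': '36104',\n    'zest_key': '0001'  # unique key used to match inputs to outputs\n}])\nprint(input_df)",
"_____no_output_____"
]
],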
[
[
"%load_ext autoreload\n%autoreload 2\n%config Completer.use_jedi=False",
"_____no_output_____"
],
[
"from os.path import join, expanduser, dirname\nimport pandas as pd\nimport sys\nimport os\nimport re\nimport warnings",
"_____no_output_____"
],
[
"warnings.filterwarnings(action='ignore')\nhome = expanduser('~')\n\nsrc_path = '{}/zrp'.format(home)\nsys.path.append(src_path)",
"_____no_output_____"
]
],
[
[
"Predefine paths & required parameters",
"_____no_output_____"
]
],
[
[
"# Support files path pointing to where the raw tigerline shapefile data is stored\nsupport_files_path = \"INSERT-PATH-HERE\"\n# Year of shapefile data\nyear = \"2019\"\n# Geo level to build lookup table at\nst_cty_code = \"01001\"",
"_____no_output_____"
]
],
[
[
"Import Geo Lookup Functions",
"_____no_output_____"
]
],
[
[
"from zrp.prepare.geo_lookup import *",
"/home/kam/.conda/envs/zrp_q1_22/lib/python3.7/site-packages/statsmodels/compat/pandas.py:35: DeprecationWarning: distutils Version classes are deprecated. Use packaging.version instead.\n version = LooseVersion(pd.__version__)\n/home/kam/.conda/envs/zrp_q1_22/lib/python3.7/importlib/_bootstrap.py:219: RuntimeWarning: numpy.ufunc size changed, may indicate binary incompatibility. Expected 192 from C header, got 216 from PyObject\n return f(*args, **kwds)\n"
]
],
[
[
"### Initialize `GeoLookUpBuilder`\nThis class constructs geographic lookup tables that enable geocoding. Census Tigerline shapefiles are required for this module to run. You can retrieve 2019 shapefiles from [https://www2.census.gov/geo/tiger/TIGER2019/](https://www2.census.gov/geo/tiger/TIGER2019/)",
"_____no_output_____"
]
],
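[
[
"#### Fetching the raw shapefiles (illustrative)\nA possible way to pull the county-level Tigerline files into `support_files_path`, assuming the standard naming pattern `tl_<year>_<state-county FIPS>_<layer>.zip` under the URL above. The layer list and the flat output layout are assumptions for illustration; adjust them to whatever `GeoLookUpBuilder` expects on disk.",
"_____no_output_____"
]
],
[
[
"import os\nimport requests\n\nbase_url = 'https://www2.census.gov/geo/tiger/TIGER' + year\nlayers = ['addrfeat', 'edges', 'faces']  # assumed set of county-level layers\n\nos.makedirs(support_files_path, exist_ok=True)\nfor layer in layers:\n    fname = 'tl_{}_{}_{}.zip'.format(year, st_cty_code, layer)  # assumed Tigerline naming pattern\n    url = '{}/{}/{}'.format(base_url, layer.upper(), fname)\n    r = requests.get(url)\n    r.raise_for_status()\n    with open(os.path.join(support_files_path, fname), 'wb') as fh:\n        fh.write(r.content)\n    print('downloaded', fname)",
"_____no_output_____"
]
],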
[
[
"geo_build = GeoLookUpBuilder(support_files_path = support_files_path, year = year)",
"_____no_output_____"
]
],
[
[
"### Run `GeoLookUpBuilder`\nProvide the state-county fips code to build a county level lookup table.\n- No data is out because `save_table` is set to False. If True then the data will be saved to a file ",
"_____no_output_____"
]
],
[
[
"%%time\noutput = geo_build.transform(st_cty_code, save_table = False)",
"Directory already exists\nDirectory already exists\nShapefile input: /d/shared/zrp/shared_data/raw/geo/2019\nLookup Table output: /d/shared/zrp/shared_data/processed/geo/2019_backup\n\n ... Loading requirements\n ... Creating lookup table\n ... Formatting lookup table\n [Start] Processing lookup data\n ...processing\n [Completed] Processing lookup data\n Number of observations: 6134\n Is key unique: False\n{'is_empty': False, 'is_all_missing': False, 'n_obs': 6134, 'is_unique_key': False, 'pct_na': {'TLID': 0.0, 'TFID': 0.0, 'ARID': 0.0, 'LINEARID': 0.0, 'ZEST_FULLNAME': 0.0, 'FROMHN': 0.0, 'TOHN': 0.0, 'ZEST_ZIP': 0.0, 'EDGE_MTFCC': 0.0, 'ROAD_MTFCC': 0.0, 'PARITY': 0.0, 'FROMTYP': 0.7508966416693837, 'TOTYP': 0.7425823280078252, 'OFFSET': 0.0, 'PLUS4': 1.0, 'STATEFP': 0.0, 'COUNTYFP': 0.0, 'FROMADD': 0.0, 'TOADD': 0.0, 'SIDE': 0.0, 'STATEFP10': 0.0, 'COUNTYFP10': 0.0, 'TRACTCE10': 0.0, 'BLKGRPCE10': 0.0, 'BLOCKCE10': 0.0, 'ZCTA5CE10': 0.0, 'PUMACE10': 0.0, 'TRACTCE': 0.0, 'BLKGRPCE': 0.0, 'BLOCKCE': 0.0, 'ZCTA5CE': 0.0, 'TTRACTCE': 1.0, 'TBLKGPCE': 1.0, 'PUMACE': 0.0, 'RAW_ZEST_ZIP': 0.0, 'RAW_ZEST_STATEFP': 0.0, 'RAW_ZEST_COUNTYFP': 0.0, 'RAW_ZEST_FULLNAME': 0.0, 'RAW_ZEST_TRACTCE': 0.0, 'RAW_ZEST_BLKGRPCE': 0.0}}\nNo tables were saved\nCPU times: user 5.37 s, sys: 105 ms, total: 5.48 s\nWall time: 5.51 s\n"
]
],
[
[
"### Inspect the output\n",
"_____no_output_____"
]
],
[
[
"output.head()",
"_____no_output_____"
],
[
"output.tail()",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
]
] |
4a626d46d4ee92823d93313c98b8adff2c073f24
| 51,956 |
ipynb
|
Jupyter Notebook
|
NDC Fixes.ipynb
|
nggdpp/ndc-experiment
|
04a22b2740c6e43c0cb41968f18aaf3798d3d4e8
|
[
"Unlicense"
] | null | null | null |
NDC Fixes.ipynb
|
nggdpp/ndc-experiment
|
04a22b2740c6e43c0cb41968f18aaf3798d3d4e8
|
[
"Unlicense"
] | 5 |
2018-11-20T14:33:07.000Z
|
2018-11-24T19:14:46.000Z
|
NDC Fixes.ipynb
|
nggdpp/ndc-experiment
|
04a22b2740c6e43c0cb41968f18aaf3798d3d4e8
|
[
"Unlicense"
] | 1 |
2018-11-13T16:15:29.000Z
|
2018-11-13T16:15:29.000Z
| 64.064118 | 771 | 0.729175 |
[
[
[
"This notebook is scratch space for some relatively simple tweaks I'm making to ScienceBase Items in the NDC in order to better position the system for building new data indexing code against. It requires authentication for using the sciencebasepy package (in PyPI) to write changes to ScienceBase.",
"_____no_output_____"
]
],
[
[
"import sciencebasepy\nfrom IPython.display import display",
"_____no_output_____"
],
[
"sb = sciencebasepy.SbSession()",
"_____no_output_____"
]
],
[
[
"This little function is something I might spruce up and put in a pynggdpp package I'm considering. It uses the ScienceBase Vocab to retrieve a \"fully qualified\" term for use. Another approach would be to generalize it and contribute it to the sciencebasepy package, but of course, the ScienceBase Vocab kind of sucks in terms of its long-term potential. I could, instead, put some time into developing a more robust vocabulary, express it through the ESIP Community Ontology Repository, and then build code around terms resolvable to that source.",
"_____no_output_____"
]
],
[
[
"import requests\n\ndef ndc_collection_type_tag(tag_name):\n vocab_search_url = f'https://www.sciencebase.gov/vocab/5bf3f7bce4b00ce5fb627d57/terms?nodeType=term&format=json&name={tag_name}'\n r_vocab_search = requests.get(vocab_search_url).json()\n if len(r_vocab_search['list']) == 1:\n tag = {'type':'theme','name':r_vocab_search['list'][0]['name'],'scheme':r_vocab_search['list'][0]['scheme']}\n return tag\n else:\n return None",
"_____no_output_____"
],
[
"username = input(\"Username: \")\nsb.loginc(str(username))",
"Username: [email protected]\n········\n"
]
],
[
[
"# Set item type tags\nI opted to use a simple vocabulary that sets items as ndc_organization, ndc_folder, or ndc_collection to help classify the primary items in the catalog as to their function. I did this in batches, being careful to review the items from a given data owner to see whether or not they did anything \"out of the orginary\" before applying tags. The parent ID supplied in the first line of this block deterimined the given batch of items to run through. The main thing I did through this was to flag certain items as \"folders,\" basically extraneous organizational constructs that some data owners decided to employ directly in ScienceBase. We may revisit this as we get into IGSN work as there may be a desire to set these up as actual collections with subcollections.",
"_____no_output_____"
]
],
[
[
"collection_items = sb.get_child_ids('5ad902ade4b0e2c2dd27a82c')\n\nitem_count = 0\nfor sbid in collection_items:\n this_item = sb.get_item(sbid, {'fields':'tags'})\n isFolder = None\n if 'tags' in this_item.keys():\n isFolder = next((t for t in this_item['tags'] if t['name'] == 'ndc_folder'), None)\n if isFolder is None:\n item = {'id':sbid,'tags':[ndc_collection_type_tag('ndc_collection')]}\n print(item)\n sb.update_item(item)\n item_count = item_count + 1\n \nprint('===========', item_count)",
"_____no_output_____"
]
],
[
[
"# Identify and flag metadata.xml files\nIn the original setup of the NDC from its roots in the Comprehensive Science Catalog, the results of a survey for collections from the State Geological Surveys were pulled from a Filemaker database into \"metadata.xml\" files that were processed into the Item model. These files are still onboard the ScienceBase Items, which is a reasonable thing to do and keep around in case we want to reprocess them in a different way. It seems reasonable to go ahead and verify these files and flag them with a title so that they can be separated out from files to examine for possible collection item processing.\n\nJust to make sure I don't inadvertenly flag something wrong, I'll write this process to open up and look at the individual \"metadata.xml\" files to ensure they are what I think they are before setting a title property.",
"_____no_output_____"
]
],
[
[
"parentId = '4f4e4760e4b07f02db47dfb4'\nqueryRoot = 'https://www.sciencebase.gov/catalog/items?format=json&max=1000&'\ntag_scheme_collections = ndc_collection_type_tag('ndc_collection')\nfields_collections = 'title,files'\nsb_query_collections = f'{queryRoot}fields={fields_collections}&folderId={parentId}&filter=tags%3D{tag_scheme_collections}'\nr_ndc_collections = requests.get(sb_query_collections).json()",
"_____no_output_____"
],
[
"for collection in [c for c in r_ndc_collections['items'] if 'files' in c.keys() and next((f for f in c['files'] if f['name'] == 'metadata.xml'), None) is not None]:\n the_files = collection['files']\n f_metadata_xml = next(f for f in the_files if f['name'] == 'metadata.xml')\n \n if requests.get(f_metadata_xml['url']).text[39:47] == '<NGGDPP>':\n new_files = []\n for f in collection['files']:\n if f['name'] == 'metadata.xml':\n f['title'] = 'Collection Metadata Source File'\n new_files.append(f)\n new_item = {'id':collection['id'], 'files':new_files}\n print(sb.update_item(new_item)['link']['url'])",
"https://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df544\nhttps://www.sciencebase.gov/catalog/item/4f4e49cfe4b07f02db5da8ec\nhttps://www.sciencebase.gov/catalog/item/4f4e49cfe4b07f02db5da976\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df543\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df4e0\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df363\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df226\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df294\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df12f\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df160\nhttps://www.sciencebase.gov/catalog/item/4f4e49cce4b07f02db5d917a\nhttps://www.sciencebase.gov/catalog/item/4f4e49cce4b07f02db5d906f\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df1ae\nhttps://www.sciencebase.gov/catalog/item/4f4e4acae4b07f02db67d22b\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df2af\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df2bf\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df11f\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df1cc\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df136\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df2a5\nhttps://www.sciencebase.gov/catalog/item/4f4e49cbe4b07f02db5d88b9\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df214\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df3ce\nhttps://www.sciencebase.gov/catalog/item/4f4e4a94e4b07f02db658de1\nhttps://www.sciencebase.gov/catalog/item/4f4e4a94e4b07f02db658e0d\nhttps://www.sciencebase.gov/catalog/item/4f4e4a94e4b07f02db658de9\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df2c5\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df2b9\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df299\nhttps://www.sciencebase.gov/catalog/item/4f4e49cfe4b07f02db5da807\nhttps://www.sciencebase.gov/catalog/item/4f4e49cce4b07f02db5d8e76\nhttps://www.sciencebase.gov/catalog/item/4f4e49cce4b07f02db5d8fb2\nhttps://www.sciencebase.gov/catalog/item/4f4e49cce4b07f02db5d9057\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df20f\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df1b7\nhttps://www.sciencebase.gov/catalog/item/4f4e49cbe4b07f02db5d8760\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df4a0\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df4a1\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df4b6\nhttps://www.sciencebase.gov/catalog/item/4f4e49cbe4b07f02db5d884a\nhttps://www.sciencebase.gov/catalog/item/4f4e49cbe4b07f02db5d8600\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df321\nhttps://www.sciencebase.gov/catalog/item/4f4e49cbe4b07f02db5d883a\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df4d0\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df165\nhttps://www.sciencebase.gov/catalog/item/4f4e49cce4b07f02db5d9039\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df42f\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df24a\nhttps://www.sciencebase.gov/catalog/item/4f4e4b23e4b07f02db6ae371\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df448\nhttps://www.sciencebase.gov/catalog/item/4f4e49cfe4b07f02db5da823\nhttps://www.sciencebase.gov/catalog/item/4f4e49cce4b07f02db5d8ea6\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df3fb\nhtt
ps://www.sciencebase.gov/catalog/item/4f4e49cbe4b07f02db5d892f\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df528\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df51a\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df4a3\nhttps://www.sciencebase.gov/catalog/item/4f4e49cbe4b07f02db5d87b7\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df52b\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df52d\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df3e2\nhttps://www.sciencebase.gov/catalog/item/4f4e4aaae4b07f02db66936f\nhttps://www.sciencebase.gov/catalog/item/4f4e4aaae4b07f02db669377\nhttps://www.sciencebase.gov/catalog/item/4f4e4b24e4b07f02db6ae5cb\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df1d8\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df35f\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df35a\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df166\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df433\nhttps://www.sciencebase.gov/catalog/item/4f4e49f2e4b07f02db5ef1cb\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df23f\nhttps://www.sciencebase.gov/catalog/item/4f4e48b1e4b07f02db530879\nhttps://www.sciencebase.gov/catalog/item/4f4e49cce4b07f02db5d8f07\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df19e\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df1ba\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df1f3\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df12e\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df54e\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df14c\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df14d\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df384\nhttps://www.sciencebase.gov/catalog/item/4f4e49cfe4b07f02db5da9de\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df0fe\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df227\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df55f\nhttps://www.sciencebase.gov/catalog/item/4f4e49cfe4b07f02db5da617\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df4d2\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df485\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df2fe\nhttps://www.sciencebase.gov/catalog/item/4f4e4a94e4b07f02db658daf\nhttps://www.sciencebase.gov/catalog/item/4f4e4a94e4b07f02db658dd0\nhttps://www.sciencebase.gov/catalog/item/4f4e4a94e4b07f02db658d93\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df290\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df28e\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df0fa\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df228\nhttps://www.sciencebase.gov/catalog/item/4f4e49cbe4b07f02db5d8236\nhttps://www.sciencebase.gov/catalog/item/4f4e49cfe4b07f02db5da9ac\nhttps://www.sciencebase.gov/catalog/item/4f4e4a5fe4b07f02db634678\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df346\nhttps://www.sciencebase.gov/catalog/item/4f4e49cfe4b07f02db5da85b\nhttps://www.sciencebase.gov/catalog/item/4f4e49cfe4b07f02db5da8ae\nhttps://www.sciencebase.gov/catalog/item/4f4e4acae4b07f02db67d223\nhttps://www.sciencebase.gov/catalog/item/4f4e4ac9e4b07f02db67c705\nhttps://www.sciencebase.gov/catalog/item/4f4e49cfe4b07f02db5da651\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df225\nhttps:/
/www.sciencebase.gov/catalog/item/4f4e4b25e4b07f02db6aee71\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df374\nhttps://www.sciencebase.gov/catalog/item/4f4e4acae4b07f02db67d225\nhttps://www.sciencebase.gov/catalog/item/4f4e4acae4b07f02db67d22c\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df1d7\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df2c0\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df13d\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df244\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df1c9\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df2ef\nhttps://www.sciencebase.gov/catalog/item/4f4e4a94e4b07f02db658d63\nhttps://www.sciencebase.gov/catalog/item/4f4e4a94e4b07f02db658dc0\nhttps://www.sciencebase.gov/catalog/item/4f4e49cbe4b07f02db5d877f\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df178\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df1de\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df4d9\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df558\nhttps://www.sciencebase.gov/catalog/item/4f4e49cce4b07f02db5d8e2e\nhttps://www.sciencebase.gov/catalog/item/4f4e49d8e4b07f02db5df54d\n"
],
[
"# Just to make sure\ndifferent_purpose_metadata_xml = [c for c in r_ndc_collections['items'] if 'files' in c.keys() and next((f for f in c['files'] if f['name'] == 'metadata.xml' and 'title' in f.keys() and f['title'] != 'Collection Metadata Source File'), None) is not None]\ndisplay(different_purpose_metadata_xml) ",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
]
] |
4a62720570159ec824b90d38fbc7722bcdfd55ef
| 9,001 |
ipynb
|
Jupyter Notebook
|
samples/lightweight-component/lightweight_component.ipynb
|
evan-hataishi/kfp-tekton
|
6e1f367841c7add4ca13e5472220939846da81b0
|
[
"Apache-2.0"
] | 126 |
2020-06-25T01:04:18.000Z
|
2022-01-10T00:36:15.000Z
|
samples/lightweight-component/lightweight_component.ipynb
|
evan-hataishi/kfp-tekton
|
6e1f367841c7add4ca13e5472220939846da81b0
|
[
"Apache-2.0"
] | 1,720 |
2021-01-25T09:32:00.000Z
|
2022-03-31T08:09:51.000Z
|
samples/lightweight-component/lightweight_component.ipynb
|
kfp-tekton-bot/kfp-tekton
|
386ad234a43ba91999360c63807bcd62b9a78878
|
[
"Apache-2.0"
] | 85 |
2019-10-24T04:04:36.000Z
|
2022-03-01T10:52:57.000Z
| 32.970696 | 368 | 0.605377 |
[
[
[
"# Lightweight python components\nLightweight python components do not require you to build a new container image for every code change. They're intended to use for fast iteration in notebook environment.\n\n**Building a lightweight python component**\n\nTo build a component just define a stand-alone python function and then call kfp.components.func_to_container_op(func) to convert it to a component that can be used in a pipeline.\n\nThere are several requirements for the function:\n\n- The function should be stand-alone. It should not use any code declared outside of the function definition. Any imports should be added inside the main function. Any helper functions should also be defined inside the main function.\n- The function can only import packages that are available in the base image. If you need to import a package that's not available you can try to find a container image that already includes the required packages. (As a workaround you can use the module subprocess to run pip install for the required package. There is an example below in my_divmod function.)\n- If the function operates on numbers, the parameters need to have type hints. Supported types are [int, float, bool]. Everything else is passed as string.\n- To build a component with multiple output values, use the typing.NamedTuple type hint syntax: NamedTuple('MyFunctionOutputs', [('output_name_1', type), ('output_name_2', float)])",
"_____no_output_____"
]
],
[
[
"# Install the dependency packages\n!pip install --upgrade pip\n!pip install numpy tensorflow kfp-tekton",
"_____no_output_____"
]
],
[
[
"**Important**: If you are running this notebook using the Kubeflow Jupyter Server, you need to restart the Python **Kernel** because the packages above overwrited some default packages inside the Kubeflow Jupyter image.",
"_____no_output_____"
]
],
[
[
"import kfp\nimport kfp.components as comp",
"_____no_output_____"
]
],
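[
[
"The cell below is a condensed sketch that puts the requirements above together in one place: a stand-alone function with type hints and a `NamedTuple` return, converted via `func_to_container_op`. The function name `stats` is just an illustrative placeholder; the real examples used in the pipeline follow.",
"_____no_output_____"
]
],
[
[
"from typing import NamedTuple\nimport kfp.components as comp\n\ndef stats(a: float, b: float) -> NamedTuple('StatsOutput', [('total', float), ('mean', float)]):\n    '''Stand-alone function: imports and helpers live inside the function body'''\n    from collections import namedtuple\n    stats_output = namedtuple('StatsOutput', ['total', 'mean'])\n    return stats_output(a + b, (a + b) / 2)\n\n# convert the function into a reusable pipeline component\nstats_op = comp.func_to_container_op(stats)",
"_____no_output_____"
]
],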
[
[
"Simple function that just add two numbers:",
"_____no_output_____"
]
],
[
[
"#Define a Python function\ndef add(a: float, b: float) -> float:\n '''Calculates sum of two arguments'''\n return a + b",
"_____no_output_____"
]
],
[
[
"Convert the function to a pipeline operation",
"_____no_output_____"
]
],
[
[
"add_op = comp.func_to_container_op(add)",
"_____no_output_____"
]
],
[
[
"A bit more advanced function which demonstrates how to use imports, helper functions and produce multiple outputs.",
"_____no_output_____"
]
],
[
[
"#Advanced function\n#Demonstrates imports, helper functions and multiple outputs\nfrom typing import NamedTuple\ndef my_divmod(dividend: float, divisor:float) -> NamedTuple('MyDivmodOutput', [('quotient', float), ('remainder', float), ('mlpipeline_ui_metadata', 'UI_metadata'), ('mlpipeline_metrics', 'Metrics')]):\n '''Divides two numbers and calculate the quotient and remainder'''\n #Pip installs inside a component function.\n #NOTE: installs should be placed right at the beginning to avoid upgrading a package\n # after it has already been imported and cached by python\n import sys, subprocess;\n subprocess.run([sys.executable, '-m', 'pip', 'install', 'tensorflow==1.8.0'])\n \n #Imports inside a component function:\n import numpy as np\n\n #This function demonstrates how to use nested functions inside a component function:\n def divmod_helper(dividend, divisor):\n return np.divmod(dividend, divisor)\n\n (quotient, remainder) = divmod_helper(dividend, divisor)\n\n from tensorflow.python.lib.io import file_io\n import json\n \n # Exports a sample tensorboard:\n metadata = {\n 'outputs' : [{\n 'type': 'tensorboard',\n 'source': 'gs://ml-pipeline-dataset/tensorboard-train',\n }]\n }\n\n # Exports two sample metrics:\n metrics = {\n 'metrics': [{\n 'name': 'quotient',\n 'numberValue': float(quotient),\n },{\n 'name': 'remainder',\n 'numberValue': float(remainder),\n }]}\n\n from collections import namedtuple\n divmod_output = namedtuple('MyDivmodOutput', ['quotient', 'remainder', 'mlpipeline_ui_metadata', 'mlpipeline_metrics'])\n return divmod_output(quotient, remainder, json.dumps(metadata), json.dumps(metrics))",
"_____no_output_____"
]
],
[
[
"Test running the python function directly",
"_____no_output_____"
]
],
[
[
"my_divmod(100, 7)",
"_____no_output_____"
]
],
[
[
"#### Convert the function to a pipeline operation\n\nYou can specify an alternative base container image (the image needs to have Python 3.5+ installed).",
"_____no_output_____"
]
],
[
[
"divmod_op = comp.func_to_container_op(my_divmod, base_image='tensorflow/tensorflow:1.11.0-py3')",
"_____no_output_____"
]
],
[
[
"#### Define the pipeline\nPipeline function has to be decorated with the `@dsl.pipeline` decorator",
"_____no_output_____"
]
],
[
[
"import kfp.dsl as dsl\[email protected](\n name='Calculation pipeline',\n description='A toy pipeline that performs arithmetic calculations.'\n)\n# Currently kfp-tekton doesn't support pass parameter to the pipelinerun yet, so we hard code the number here\ndef calc_pipeline(\n a='7',\n b='8',\n c='17',\n):\n #Passing pipeline parameter and a constant value as operation arguments\n add_task = add_op(a, 4) #Returns a dsl.ContainerOp class instance. \n \n #Passing a task output reference as operation arguments\n #For an operation with a single return value, the output reference can be accessed using `task.output` or `task.outputs['output_name']` syntax\n divmod_task = divmod_op(add_task.output, b)\n\n #For an operation with a multiple return values, the output references can be accessed using `task.outputs['output_name']` syntax\n result_task = add_op(divmod_task.outputs['quotient'], c)",
"_____no_output_____"
]
],
[
[
"Compile and run the pipeline into Tekton yaml using kfp-tekton SDK",
"_____no_output_____"
]
],
[
[
"# Specify pipeline argument values\narguments = {'a': '7', 'b': '8'}\n\n# Specify Kubeflow Pipeline Host\nhost=None\n\n# Submit a pipeline run using the KFP Tekton client.\nfrom kfp_tekton import TektonClient\nTektonClient(host=host).create_run_from_pipeline_func(calc_pipeline, arguments=arguments)\n\n# For Argo users, submit the pipeline run using the below client.\n# kfp.Client(host=host).create_run_from_pipeline_func(calc_pipeline, arguments=arguments)",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a6277a9367ecb75ada47ef6040d7b9a58180531
| 3,536 |
ipynb
|
Jupyter Notebook
|
notebooks/.ipynb_checkpoints/SimpleTree0-checkpoint.ipynb
|
cesarali/Tag2Hierarchy
|
182bc948a1b4c497a991b12a29ff56023f98c05a
|
[
"MIT"
] | null | null | null |
notebooks/.ipynb_checkpoints/SimpleTree0-checkpoint.ipynb
|
cesarali/Tag2Hierarchy
|
182bc948a1b4c497a991b12a29ff56023f98c05a
|
[
"MIT"
] | 1 |
2017-06-13T13:08:12.000Z
|
2017-06-13T13:08:12.000Z
|
notebooks/.ipynb_checkpoints/SimpleTree0-checkpoint.ipynb
|
cesarali/Tag2Hierarchy
|
182bc948a1b4c497a991b12a29ff56023f98c05a
|
[
"MIT"
] | null | null | null | 26.992366 | 166 | 0.350396 |
[
[
[
"import numpy as np\nimport pandas as pd\nimport json\nimport os\nimport copy\nfrom itertools import chain",
"_____no_output_____"
],
[
"tree = [{\"name\":\"A\",\n \"children\":[{\"name\":\"B\",\"children\":[{\"name\":\"E\",\"children\":[{\"name\":\"Q\",\"children\":None},\n {\"name\":\"R\",\"children\":[{\"name\":\"S\",\"children\":None},\n {\"name\":\"T\",\"children\":[{\"name\":\"U\",\"children\":None},\n {\"name\":\"V\",\"children\":None}]}]}]},\n {\"name\":\"F\",\"children\":None},\n {\"name\":\"G\",\"children\":[{\"name\":\"O\",\"children\":None},{\"name\":\"P\",\"children\":None}]}]},\n {\"name\":\"C\",\"children\":[{\"name\":\"H\",\"children\":[{\"name\":\"M\",\"children\":None},\n {\"name\":\"N\",\"children\":None}]},\n {\"name\":\"I\",\"children\":[{\"name\":\"K\",\"children\":None},\n {\"name\":\"L\",\"children\":None}]},\n {\"name\":\"J\",\"children\":None}]},\n {\"name\":\"D\",\"children\":None}]}]\njson.dump(tree[0],open(\"../visualization/myTree.json\",\"w\"))",
"_____no_output_____"
]
],
[
[
"# Store All Branches",
"_____no_output_____"
]
],
[
[
"def myDf(nodes):\n if nodes is not None:\n for node in nodes:\n yield node\n for child in myDf(node['children']):\n yield child",
"_____no_output_____"
],
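[
"# A small sketch of one way to actually store the branches that the heading above refers to,\n# here taken to mean root-to-leaf paths; the helper name myBranches is illustrative.\ndef myBranches(nodes, path=()):\n    if nodes is not None:\n        for node in nodes:\n            new_path = path + (node['name'],)\n            if node['children'] is None:\n                yield new_path\n            else:\n                for branch in myBranches(node['children'], new_path):\n                    yield branch\n\nbranches = [list(b) for b in myBranches(tree)]\nprint(branches)",
"_____no_output_____"
],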
[
"for node in myDf(tree):\n print node[\"name\"]",
"A\nB\nE\nQ\nR\nS\nT\nU\nV\nF\nG\nO\nP\nC\nH\nM\nN\nI\nK\nL\nJ\nD\n"
]
]
] |
[
"code",
"markdown",
"code"
] |
[
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
]
] |
4a627ec4957b2c2496c135372c81e6378a3b6c5b
| 121,615 |
ipynb
|
Jupyter Notebook
|
Deep Learning/Neural Networks/Intro to Neural Networks/student-admissions/StudentAdmissions.ipynb
|
bhupendpatil/Practice
|
9663b3f41e359787cbbd04aedb3db3c605c6ec8e
|
[
"MIT"
] | 1 |
2020-12-23T06:22:29.000Z
|
2020-12-23T06:22:29.000Z
|
Deep Learning/Neural Networks/Intro to Neural Networks/student-admissions/StudentAdmissions.ipynb
|
bhupendpatil/Practice
|
9663b3f41e359787cbbd04aedb3db3c605c6ec8e
|
[
"MIT"
] | 8 |
2020-06-18T19:32:39.000Z
|
2022-03-11T11:37:07.000Z
|
Deep Learning/Neural Networks/Intro to Neural Networks/student-admissions/StudentAdmissions.ipynb
|
bhupendpatil/Practice
|
9663b3f41e359787cbbd04aedb3db3c605c6ec8e
|
[
"MIT"
] | 1 |
2021-01-19T00:16:34.000Z
|
2021-01-19T00:16:34.000Z
| 127.079415 | 27,860 | 0.830769 |
[
[
[
"# Predicting Student Admissions with Neural Networks\nIn this notebook, we predict student admissions to graduate school at UCLA based on three pieces of data:\n- GRE Scores (Test)\n- GPA Scores (Grades)\n- Class rank (1-4)\n\nThe dataset originally came from here: http://www.ats.ucla.edu/\n\n## Loading the data\nTo load the data and format it nicely, we will use two very useful packages called Pandas and Numpy. You can read on the documentation here:\n- https://pandas.pydata.org/pandas-docs/stable/\n- https://docs.scipy.org/",
"_____no_output_____"
]
],
[
[
"# Importing pandas and numpy\nimport pandas as pd\nimport numpy as np\n\n# Reading the csv file into a pandas DataFrame\ndata = pd.read_csv('student_data.csv')\n\n# Printing out the first 10 rows of our data\ndata[:10]",
"_____no_output_____"
]
],
[
[
"## Plotting the data\n\nFirst let's make a plot of our data to see how it looks. In order to have a 2D plot, let's ingore the rank.",
"_____no_output_____"
]
],
[
[
"# Importing matplotlib\nimport matplotlib.pyplot as plt\n\n# Function to help us plot\ndef plot_points(data):\n X = np.array(data[[\"gre\",\"gpa\"]])\n y = np.array(data[\"admit\"])\n admitted = X[np.argwhere(y==1)]\n rejected = X[np.argwhere(y==0)]\n plt.scatter([s[0][0] for s in rejected], [s[0][1] for s in rejected], s = 25, color = 'red', edgecolor = 'k')\n plt.scatter([s[0][0] for s in admitted], [s[0][1] for s in admitted], s = 25, color = 'cyan', edgecolor = 'k')\n plt.xlabel('Test (GRE)')\n plt.ylabel('Grades (GPA)')\n \n# Plotting the points\nplot_points(data)\nplt.show()",
"_____no_output_____"
]
],
[
[
"Roughly, it looks like the students with high scores in the grades and test passed, while the ones with low scores didn't, but the data is not as nicely separable as we hoped it would. Maybe it would help to take the rank into account? Let's make 4 plots, each one for each rank.",
"_____no_output_____"
]
],
[
[
"# Separating the ranks\ndata_rank1 = data[data[\"rank\"]==1]\ndata_rank2 = data[data[\"rank\"]==2]\ndata_rank3 = data[data[\"rank\"]==3]\ndata_rank4 = data[data[\"rank\"]==4]\n\n# Plotting the graphs\nplot_points(data_rank1)\nplt.title(\"Rank 1\")\nplt.show()\nplot_points(data_rank2)\nplt.title(\"Rank 2\")\nplt.show()\nplot_points(data_rank3)\nplt.title(\"Rank 3\")\nplt.show()\nplot_points(data_rank4)\nplt.title(\"Rank 4\")\nplt.show()",
"_____no_output_____"
]
],
[
[
"This looks more promising, as it seems that the lower the rank, the higher the acceptance rate. Let's use the rank as one of our inputs. In order to do this, we should one-hot encode it.\n\n## TODO: One-hot encoding the rank\nUse the `get_dummies` function in numpy in order to one-hot encode the data.",
"_____no_output_____"
]
],
[
[
"# TODO: Make dummy variables for rank\none_hot_data = pd.concat([data, pd.get_dummies(data[\"rank\"],prefix=\"rank\")],axis=1)\n\n# TODO: Drop the previous rank column\none_hot_data = one_hot_data.drop(\"rank\", axis=1)\n\n# Print the first 10 rows of our data\none_hot_data[:10]",
"_____no_output_____"
]
],
[
[
"## TODO: Scaling the data\nThe next step is to scale the data. We notice that the range for grades is 1.0-4.0, whereas the range for test scores is roughly 200-800, which is much larger. This means our data is skewed, and that makes it hard for a neural network to handle. Let's fit our two features into a range of 0-1, by dividing the grades by 4.0, and the test score by 800.",
"_____no_output_____"
]
],
[
[
"# Making a copy of our data\nprocessed_data = one_hot_data[:]\n\n# TODO: Scale the columns\nprocessed_data[\"gre\"] = processed_data[\"gre\"] / 800\nprocessed_data[\"gpa\"] = processed_data[\"gpa\"] / 4.0\n\n# Printing the first 10 rows of our procesed data\nprocessed_data[:10]",
"_____no_output_____"
]
],
[
[
"## Splitting the data into Training and Testing",
"_____no_output_____"
],
[
"In order to test our algorithm, we'll split the data into a Training and a Testing set. The size of the testing set will be 10% of the total data.",
"_____no_output_____"
]
],
[
[
"sample = np.random.choice(processed_data.index, size=int(len(processed_data)*0.9), replace=False)\ntrain_data, test_data = processed_data.iloc[sample], processed_data.drop(sample)\n\nprint(\"Number of training samples is\", len(train_data))\nprint(\"Number of testing samples is\", len(test_data))\nprint(train_data[:10])\nprint(test_data[:10])",
"Number of training samples is 360\nNumber of testing samples is 40\n admit gre gpa rank_1 rank_2 rank_3 rank_4\n337 0 0.775 0.7725 0 0 0 1\n352 1 0.725 0.7800 0 0 1 0\n203 0 0.525 0.9800 0 0 0 1\n187 0 0.725 0.7200 0 1 0 0\n313 1 0.650 0.9125 0 0 0 1\n324 0 0.650 0.6750 0 0 1 0\n343 0 0.725 0.7650 0 1 0 0\n46 1 0.725 0.8650 0 1 0 0\n325 0 0.850 0.9750 1 0 0 0\n146 0 0.600 0.8500 0 1 0 0\n admit gre gpa rank_1 rank_2 rank_3 rank_4\n7 0 0.500 0.7700 0 1 0 0\n10 0 1.000 1.0000 0 0 0 1\n11 0 0.550 0.8050 1 0 0 0\n15 0 0.600 0.8600 0 0 1 0\n20 0 0.625 0.7925 0 0 1 0\n51 0 0.550 0.7825 0 0 0 1\n63 1 0.850 0.9625 0 0 1 0\n69 0 1.000 0.9325 1 0 0 0\n71 0 0.375 0.7300 0 0 0 1\n84 1 0.625 0.9000 0 0 1 0\n"
]
],
[
[
"## Splitting the data into features and targets (labels)\nNow, as a final step before the training, we'll split the data into features (X) and targets (y).",
"_____no_output_____"
]
],
[
[
"features = train_data.drop('admit', axis=1)\ntargets = train_data['admit']\nfeatures_test = test_data.drop('admit', axis=1)\ntargets_test = test_data['admit']\n\nprint(features[:10])\nprint(targets[:10])",
" gre gpa rank_1 rank_2 rank_3 rank_4\n337 0.775 0.7725 0 0 0 1\n352 0.725 0.7800 0 0 1 0\n203 0.525 0.9800 0 0 0 1\n187 0.725 0.7200 0 1 0 0\n313 0.650 0.9125 0 0 0 1\n324 0.650 0.6750 0 0 1 0\n343 0.725 0.7650 0 1 0 0\n46 0.725 0.8650 0 1 0 0\n325 0.850 0.9750 1 0 0 0\n146 0.600 0.8500 0 1 0 0\n337 0\n352 1\n203 0\n187 0\n313 1\n324 0\n343 0\n46 1\n325 0\n146 0\nName: admit, dtype: int64\n"
]
],
[
[
"## Training the 2-layer Neural Network\nThe following function trains the 2-layer neural network. First, we'll write some helper functions.",
"_____no_output_____"
]
],
[
[
"# Activation (sigmoid) function\ndef sigmoid(x):\n return 1 / (1 + np.exp(-x))\ndef sigmoid_prime(x):\n return sigmoid(x) * (1-sigmoid(x))\ndef error_formula(y, output):\n return - y*np.log(output) - (1 - y) * np.log(1-output)",
"_____no_output_____"
]
],
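Since the error-term exercise that follows relies on the identity sigma'(x) = sigma(x)(1 - sigma(x)), a quick numerical sanity check (a sketch that assumes the helper cell above has been run) confirms the helpers agree with a finite-difference estimate:

```python
# Compare sigmoid_prime against a central finite-difference estimate
x = 0.7
eps = 1e-6
numeric = (sigmoid(x + eps) - sigmoid(x - eps)) / (2 * eps)
print(sigmoid_prime(x), numeric)  # both approximately 0.2217
```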
[
[
"# TODO: Backpropagate the error\nNow it's your turn to shine. Write the error term. Remember that this is given by the equation $$ -(y-\\hat{y}) \\sigma'(x) $$",
"_____no_output_____"
]
],
[
[
"# TODO: Write the error term formula\ndef error_term_formula(y, output):\n return -(y - output) * output * (output - 1)",
"_____no_output_____"
],
[
"# Neural Network hyperparameters\nepochs = 1000\nlearnrate = 0.5\n\n# Training function\ndef train_nn(features, targets, epochs, learnrate):\n \n # Use to same seed to make debugging easier\n np.random.seed(42)\n\n n_records, n_features = features.shape\n last_loss = None\n\n # Initialize weights\n weights = np.random.normal(scale=1 / n_features**.5, size=n_features)\n\n for e in range(epochs):\n del_w = np.zeros(weights.shape)\n for x, y in zip(features.values, targets):\n # Loop through all records, x is the input, y is the target\n\n # Activation of the output unit\n # Notice we multiply the inputs and the weights here \n # rather than storing h as a separate variable \n output = sigmoid(np.dot(x, weights))\n\n # The error, the target minus the network output\n error = error_formula(y, output)\n\n # The error term\n # Notice we calulate f'(h) here instead of defining a separate\n # sigmoid_prime function. This just makes it faster because we\n # can re-use the result of the sigmoid function stored in\n # the output variable\n error_term = error_term_formula(y, output)\n\n # The gradient descent step, the error times the gradient times the inputs\n del_w += error_term * x\n\n # Update the weights here. The learning rate times the \n # change in weights, divided by the number of records to average\n weights += learnrate * del_w / n_records\n\n # Printing out the mean square error on the training set\n if e % (epochs / 10) == 0:\n out = sigmoid(np.dot(features, weights))\n loss = np.mean((out - targets) ** 2)\n print(\"Epoch:\", e)\n if last_loss and last_loss < loss:\n print(\"Train loss: \", loss, \" WARNING - Loss Increasing\")\n else:\n print(\"Train loss: \", loss)\n last_loss = loss\n print(\"=========\")\n print(\"Finished training!\")\n return weights\n \nweights = train_nn(features, targets, epochs, learnrate)",
"Epoch: 0\nTrain loss: 0.27401059160875035\n=========\nEpoch: 100\nTrain loss: 0.21261374801232055\n=========\nEpoch: 200\nTrain loss: 0.2106078884950585\n=========\nEpoch: 300\nTrain loss: 0.20972566627771627\n=========\nEpoch: 400\nTrain loss: 0.20927491873140464\n=========\nEpoch: 500\nTrain loss: 0.2089967440766021\n=========\nEpoch: 600\nTrain loss: 0.20879224405576388\n=========\nEpoch: 700\nTrain loss: 0.20862188043381932\n=========\nEpoch: 800\nTrain loss: 0.20846897977440876\n=========\nEpoch: 900\nTrain loss: 0.20832612066302048\n=========\nFinished training!\n"
]
],
[
[
"## Calculating the Accuracy on the Test Data",
"_____no_output_____"
]
],
[
[
"# Calculate accuracy on test data\ntes_out = sigmoid(np.dot(features_test, weights))\npredictions = tes_out > 0.5\naccuracy = np.mean(predictions == targets_test)\nprint(\"Prediction accuracy: {:.3f}\".format(accuracy))",
"Prediction accuracy: 0.750\n"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a62a0c1a9379dc5fed355814118ae7b72b8ab88
| 287,435 |
ipynb
|
Jupyter Notebook
|
.ipynb_checkpoints/FaceMask-checkpoint.ipynb
|
evanezcent/Face-Mask-Detection
|
8092f2031a0b7c6f529bae58fdf7254ba2adcdcf
|
[
"MIT"
] | 1 |
2020-12-30T16:44:27.000Z
|
2020-12-30T16:44:27.000Z
|
.ipynb_checkpoints/FaceMask-checkpoint.ipynb
|
evanezcent/Face-Mask-Detection
|
8092f2031a0b7c6f529bae58fdf7254ba2adcdcf
|
[
"MIT"
] | null | null | null |
.ipynb_checkpoints/FaceMask-checkpoint.ipynb
|
evanezcent/Face-Mask-Detection
|
8092f2031a0b7c6f529bae58fdf7254ba2adcdcf
|
[
"MIT"
] | null | null | null | 240.531381 | 176,668 | 0.893339 |
[
[
[
"import tensorflow as tf\nfrom tensorflow.keras.preprocessing.image import ImageDataGenerator\nfrom tensorflow.keras.preprocessing import image\nfrom tensorflow.keras.applications import MobileNetV2\nfrom tensorflow.keras.layers import AveragePooling2D\nfrom tensorflow.keras.layers import Dropout\nfrom tensorflow.keras.layers import Flatten\nfrom tensorflow.keras.layers import Dense\nfrom tensorflow.keras.layers import Input\nfrom tensorflow.keras.models import Model, load_model\nfrom tensorflow.keras.optimizers import Adam\nfrom tensorflow.keras.applications.mobilenet_v2 import preprocess_input\nfrom tensorflow.keras.preprocessing.image import img_to_array\nfrom tensorflow.keras.preprocessing.image import load_img\nfrom tensorflow.keras.preprocessing import image\nfrom tensorflow.keras.utils import to_categorical\nfrom sklearn.preprocessing import LabelBinarizer\nfrom sklearn.metrics import classification_report\nfrom imutils import paths\nimport matplotlib.pyplot as plt\nplt.style.use(\"seaborn\")\nimport numpy as np\nfrom numpy import expand_dims\nimport pandas as pd\nimport random\nfrom pathlib import Path\nfrom IPython.display import display\nfrom PIL import Image\nimport pickle\nimport glob\nimport os\nimport cv2",
"_____no_output_____"
],
[
"from google.colab import drive\ndrive.mount('/drive')",
"Mounted at /drive\n"
],
[
"os.listdir('/drive/My Drive/FaceMask/data')",
"_____no_output_____"
],
[
"def loadData(path,dataFrame):\n data = []\n for i in range(len(dataFrame)):\n data.append(path+dataFrame['filename'][i])\n return data",
"_____no_output_____"
],
[
"def loadImages(listPath, img_size):\n images = []\n for img in listPath:\n z= image.load_img(img,target_size=img_size)\n r = image.img_to_array(z)\n r = preprocess_input(r)\n images.append(r)\n \n return np.array(images)",
"_____no_output_____"
],
[
"def loadLabels(dataFrame):\n labels = []\n for row in range(len(dataFrame)):\n if dataFrame[\"class\"][row] == 'with_mask':\n y= [1.0, 0.0]\n else:\n y=[0.0, 1.0]\n\n labels.append(y)\n\n return np.array(labels,dtype=\"float32\")",
"_____no_output_____"
]
],
[
[
"##### Load data train path",
"_____no_output_____"
]
],
[
[
"path = \"/drive/My Drive/FaceMask/data/train/\"\ntrain_csv_df = pd.DataFrame(pd.read_csv(\"/drive/My Drive/FaceMask/data/train.csv\"))\ntrain_csv_df.head()",
"_____no_output_____"
],
[
"imgPath = loadData(path,train_csv_df)",
"_____no_output_____"
]
],
[
[
"##### Load data test path",
"_____no_output_____"
]
],
[
[
"testPath = \"/drive/My Drive/FaceMask/data/test/\"\ntest_csv_df = pd.DataFrame(pd.read_csv(\"/drive/My Drive/FaceMask/data/test.csv\"))\ntest_csv_df.head()",
"_____no_output_____"
],
[
"imgTest = loadData(testPath,test_csv_df)",
"_____no_output_____"
]
],
[
[
"### Get data train and data test",
"_____no_output_____"
]
],
[
[
"train_images_array = loadImages(imgPath, (300,300))\ntest_images_array = loadImages(imgTest, (224,224))",
"_____no_output_____"
]
],
[
[
"### Get the labels",
"_____no_output_____"
]
],
[
[
"train_labels_array = loadLabels(train_csv_df)\ntest_labels_array = loadLabels(test_csv_df)",
"_____no_output_____"
]
],
[
[
"### Augmentasi data train",
"_____no_output_____"
]
],
[
[
"aug = ImageDataGenerator(\n rotation_range=20,\n zoom_range=0.15,\n width_shift_range=0.2,\n height_shift_range=0.2,\n shear_range=0.15,\n horizontal_flip=True,\n fill_mode=\"nearest\")",
"_____no_output_____"
],
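To make the effect of the augmentation pipeline concrete, a small sketch (assuming `train_images_array` has already been built by `loadImages` above) can preview a few augmented variants of one training image:

```python
# Preview augmented versions of the first training image
batch = np.expand_dims(train_images_array[0], axis=0)
fig, axes = plt.subplots(1, 4, figsize=(12, 3))
for ax, (augmented,) in zip(axes, aug.flow(batch, batch_size=1)):
    # MobileNetV2 preprocessing maps pixels to [-1, 1]; shift back for display
    ax.imshow((augmented + 1.0) / 2.0)
    ax.axis("off")
plt.show()
```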
[
"# Loading the MobileNetV2 network, with the topmost layer removed\n\nbase_model = MobileNetV2(weights=\"imagenet\", include_top=False,\n input_tensor=Input(shape=(224, 224, 3)))\n\n\n# Freeze the layer of the base model to make them untrainable.\n# This ensures that their weights are not updated when we train the model.\n\nfor layer in base_model.layers:\n layer.trainable = False\n \n# Construct head of the model that will be attached on top of the base model:\n\nhead_model = base_model.output\nhead_model = AveragePooling2D(pool_size=(7, 7))(head_model)\nhead_model = Flatten(name=\"flatten\")(head_model)\nhead_model = Dense(128, activation=\"relu\")(head_model)\nhead_model = Dropout(0.5)(head_model)\nhead_model = Dense(2, activation=\"softmax\")(head_model)\n\n\n# Combine the head and base of the models together:\n\nmy_model = Model(inputs=base_model.input, outputs=head_model)",
"WARNING:tensorflow:`input_shape` is undefined or non-square, or `rows` is not in [96, 128, 160, 192, 224]. Weights for input shape (224, 224) will be loaded as the default.\n"
],
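Because the base layers are frozen, only the new head is trained below. If further accuracy were needed, a common follow-up is to unfreeze the top of the backbone and fine-tune at a much smaller learning rate; the layer count and rate in this sketch are illustrative choices, not values from the original notebook:

```python
# Optional fine-tuning sketch: unfreeze the last MobileNetV2 layers after the
# head has converged, then recompile with a much smaller learning rate.
for layer in base_model.layers[-20:]:
    layer.trainable = True

my_model.compile(loss="binary_crossentropy",
                 optimizer=Adam(learning_rate=1e-5),
                 metrics=["accuracy"])
```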
[
"my_model.summary()",
"Model: \"functional_5\"\n__________________________________________________________________________________________________\nLayer (type) Output Shape Param # Connected to \n==================================================================================================\ninput_3 (InputLayer) [(None, 224, 224, 3) 0 \n__________________________________________________________________________________________________\nConv1_pad (ZeroPadding2D) (None, 225, 225, 3) 0 input_3[0][0] \n__________________________________________________________________________________________________\nConv1 (Conv2D) (None, 112, 112, 32) 864 Conv1_pad[0][0] \n__________________________________________________________________________________________________\nbn_Conv1 (BatchNormalization) (None, 112, 112, 32) 128 Conv1[0][0] \n__________________________________________________________________________________________________\nConv1_relu (ReLU) (None, 112, 112, 32) 0 bn_Conv1[0][0] \n__________________________________________________________________________________________________\nexpanded_conv_depthwise (Depthw (None, 112, 112, 32) 288 Conv1_relu[0][0] \n__________________________________________________________________________________________________\nexpanded_conv_depthwise_BN (Bat (None, 112, 112, 32) 128 expanded_conv_depthwise[0][0] \n__________________________________________________________________________________________________\nexpanded_conv_depthwise_relu (R (None, 112, 112, 32) 0 expanded_conv_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nexpanded_conv_project (Conv2D) (None, 112, 112, 16) 512 expanded_conv_depthwise_relu[0][0\n__________________________________________________________________________________________________\nexpanded_conv_project_BN (Batch (None, 112, 112, 16) 64 expanded_conv_project[0][0] \n__________________________________________________________________________________________________\nblock_1_expand (Conv2D) (None, 112, 112, 96) 1536 expanded_conv_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_1_expand_BN (BatchNormali (None, 112, 112, 96) 384 block_1_expand[0][0] \n__________________________________________________________________________________________________\nblock_1_expand_relu (ReLU) (None, 112, 112, 96) 0 block_1_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_1_pad (ZeroPadding2D) (None, 113, 113, 96) 0 block_1_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_1_depthwise (DepthwiseCon (None, 56, 56, 96) 864 block_1_pad[0][0] \n__________________________________________________________________________________________________\nblock_1_depthwise_BN (BatchNorm (None, 56, 56, 96) 384 block_1_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_1_depthwise_relu (ReLU) (None, 56, 56, 96) 0 block_1_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_1_project (Conv2D) (None, 56, 56, 24) 2304 block_1_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_1_project_BN (BatchNormal (None, 56, 56, 24) 96 block_1_project[0][0] 
\n__________________________________________________________________________________________________\nblock_2_expand (Conv2D) (None, 56, 56, 144) 3456 block_1_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_2_expand_BN (BatchNormali (None, 56, 56, 144) 576 block_2_expand[0][0] \n__________________________________________________________________________________________________\nblock_2_expand_relu (ReLU) (None, 56, 56, 144) 0 block_2_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_2_depthwise (DepthwiseCon (None, 56, 56, 144) 1296 block_2_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_2_depthwise_BN (BatchNorm (None, 56, 56, 144) 576 block_2_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_2_depthwise_relu (ReLU) (None, 56, 56, 144) 0 block_2_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_2_project (Conv2D) (None, 56, 56, 24) 3456 block_2_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_2_project_BN (BatchNormal (None, 56, 56, 24) 96 block_2_project[0][0] \n__________________________________________________________________________________________________\nblock_2_add (Add) (None, 56, 56, 24) 0 block_1_project_BN[0][0] \n block_2_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_3_expand (Conv2D) (None, 56, 56, 144) 3456 block_2_add[0][0] \n__________________________________________________________________________________________________\nblock_3_expand_BN (BatchNormali (None, 56, 56, 144) 576 block_3_expand[0][0] \n__________________________________________________________________________________________________\nblock_3_expand_relu (ReLU) (None, 56, 56, 144) 0 block_3_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_3_pad (ZeroPadding2D) (None, 57, 57, 144) 0 block_3_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_3_depthwise (DepthwiseCon (None, 28, 28, 144) 1296 block_3_pad[0][0] \n__________________________________________________________________________________________________\nblock_3_depthwise_BN (BatchNorm (None, 28, 28, 144) 576 block_3_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_3_depthwise_relu (ReLU) (None, 28, 28, 144) 0 block_3_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_3_project (Conv2D) (None, 28, 28, 32) 4608 block_3_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_3_project_BN (BatchNormal (None, 28, 28, 32) 128 block_3_project[0][0] \n__________________________________________________________________________________________________\nblock_4_expand (Conv2D) (None, 28, 28, 192) 6144 block_3_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_4_expand_BN (BatchNormali (None, 28, 28, 
192) 768 block_4_expand[0][0] \n__________________________________________________________________________________________________\nblock_4_expand_relu (ReLU) (None, 28, 28, 192) 0 block_4_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_4_depthwise (DepthwiseCon (None, 28, 28, 192) 1728 block_4_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_4_depthwise_BN (BatchNorm (None, 28, 28, 192) 768 block_4_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_4_depthwise_relu (ReLU) (None, 28, 28, 192) 0 block_4_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_4_project (Conv2D) (None, 28, 28, 32) 6144 block_4_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_4_project_BN (BatchNormal (None, 28, 28, 32) 128 block_4_project[0][0] \n__________________________________________________________________________________________________\nblock_4_add (Add) (None, 28, 28, 32) 0 block_3_project_BN[0][0] \n block_4_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_5_expand (Conv2D) (None, 28, 28, 192) 6144 block_4_add[0][0] \n__________________________________________________________________________________________________\nblock_5_expand_BN (BatchNormali (None, 28, 28, 192) 768 block_5_expand[0][0] \n__________________________________________________________________________________________________\nblock_5_expand_relu (ReLU) (None, 28, 28, 192) 0 block_5_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_5_depthwise (DepthwiseCon (None, 28, 28, 192) 1728 block_5_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_5_depthwise_BN (BatchNorm (None, 28, 28, 192) 768 block_5_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_5_depthwise_relu (ReLU) (None, 28, 28, 192) 0 block_5_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_5_project (Conv2D) (None, 28, 28, 32) 6144 block_5_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_5_project_BN (BatchNormal (None, 28, 28, 32) 128 block_5_project[0][0] \n__________________________________________________________________________________________________\nblock_5_add (Add) (None, 28, 28, 32) 0 block_4_add[0][0] \n block_5_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_6_expand (Conv2D) (None, 28, 28, 192) 6144 block_5_add[0][0] \n__________________________________________________________________________________________________\nblock_6_expand_BN (BatchNormali (None, 28, 28, 192) 768 block_6_expand[0][0] \n__________________________________________________________________________________________________\nblock_6_expand_relu (ReLU) (None, 28, 28, 192) 0 block_6_expand_BN[0][0] 
\n__________________________________________________________________________________________________\nblock_6_pad (ZeroPadding2D) (None, 29, 29, 192) 0 block_6_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_6_depthwise (DepthwiseCon (None, 14, 14, 192) 1728 block_6_pad[0][0] \n__________________________________________________________________________________________________\nblock_6_depthwise_BN (BatchNorm (None, 14, 14, 192) 768 block_6_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_6_depthwise_relu (ReLU) (None, 14, 14, 192) 0 block_6_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_6_project (Conv2D) (None, 14, 14, 64) 12288 block_6_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_6_project_BN (BatchNormal (None, 14, 14, 64) 256 block_6_project[0][0] \n__________________________________________________________________________________________________\nblock_7_expand (Conv2D) (None, 14, 14, 384) 24576 block_6_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_7_expand_BN (BatchNormali (None, 14, 14, 384) 1536 block_7_expand[0][0] \n__________________________________________________________________________________________________\nblock_7_expand_relu (ReLU) (None, 14, 14, 384) 0 block_7_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_7_depthwise (DepthwiseCon (None, 14, 14, 384) 3456 block_7_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_7_depthwise_BN (BatchNorm (None, 14, 14, 384) 1536 block_7_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_7_depthwise_relu (ReLU) (None, 14, 14, 384) 0 block_7_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_7_project (Conv2D) (None, 14, 14, 64) 24576 block_7_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_7_project_BN (BatchNormal (None, 14, 14, 64) 256 block_7_project[0][0] \n__________________________________________________________________________________________________\nblock_7_add (Add) (None, 14, 14, 64) 0 block_6_project_BN[0][0] \n block_7_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_8_expand (Conv2D) (None, 14, 14, 384) 24576 block_7_add[0][0] \n__________________________________________________________________________________________________\nblock_8_expand_BN (BatchNormali (None, 14, 14, 384) 1536 block_8_expand[0][0] \n__________________________________________________________________________________________________\nblock_8_expand_relu (ReLU) (None, 14, 14, 384) 0 block_8_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_8_depthwise (DepthwiseCon (None, 14, 14, 384) 3456 block_8_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_8_depthwise_BN 
(BatchNorm (None, 14, 14, 384) 1536 block_8_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_8_depthwise_relu (ReLU) (None, 14, 14, 384) 0 block_8_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_8_project (Conv2D) (None, 14, 14, 64) 24576 block_8_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_8_project_BN (BatchNormal (None, 14, 14, 64) 256 block_8_project[0][0] \n__________________________________________________________________________________________________\nblock_8_add (Add) (None, 14, 14, 64) 0 block_7_add[0][0] \n block_8_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_9_expand (Conv2D) (None, 14, 14, 384) 24576 block_8_add[0][0] \n__________________________________________________________________________________________________\nblock_9_expand_BN (BatchNormali (None, 14, 14, 384) 1536 block_9_expand[0][0] \n__________________________________________________________________________________________________\nblock_9_expand_relu (ReLU) (None, 14, 14, 384) 0 block_9_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_9_depthwise (DepthwiseCon (None, 14, 14, 384) 3456 block_9_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_9_depthwise_BN (BatchNorm (None, 14, 14, 384) 1536 block_9_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_9_depthwise_relu (ReLU) (None, 14, 14, 384) 0 block_9_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_9_project (Conv2D) (None, 14, 14, 64) 24576 block_9_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_9_project_BN (BatchNormal (None, 14, 14, 64) 256 block_9_project[0][0] \n__________________________________________________________________________________________________\nblock_9_add (Add) (None, 14, 14, 64) 0 block_8_add[0][0] \n block_9_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_10_expand (Conv2D) (None, 14, 14, 384) 24576 block_9_add[0][0] \n__________________________________________________________________________________________________\nblock_10_expand_BN (BatchNormal (None, 14, 14, 384) 1536 block_10_expand[0][0] \n__________________________________________________________________________________________________\nblock_10_expand_relu (ReLU) (None, 14, 14, 384) 0 block_10_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_10_depthwise (DepthwiseCo (None, 14, 14, 384) 3456 block_10_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_10_depthwise_BN (BatchNor (None, 14, 14, 384) 1536 block_10_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_10_depthwise_relu (ReLU) (None, 14, 14, 384) 0 block_10_depthwise_BN[0][0] 
\n__________________________________________________________________________________________________\nblock_10_project (Conv2D) (None, 14, 14, 96) 36864 block_10_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_10_project_BN (BatchNorma (None, 14, 14, 96) 384 block_10_project[0][0] \n__________________________________________________________________________________________________\nblock_11_expand (Conv2D) (None, 14, 14, 576) 55296 block_10_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_11_expand_BN (BatchNormal (None, 14, 14, 576) 2304 block_11_expand[0][0] \n__________________________________________________________________________________________________\nblock_11_expand_relu (ReLU) (None, 14, 14, 576) 0 block_11_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_11_depthwise (DepthwiseCo (None, 14, 14, 576) 5184 block_11_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_11_depthwise_BN (BatchNor (None, 14, 14, 576) 2304 block_11_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_11_depthwise_relu (ReLU) (None, 14, 14, 576) 0 block_11_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_11_project (Conv2D) (None, 14, 14, 96) 55296 block_11_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_11_project_BN (BatchNorma (None, 14, 14, 96) 384 block_11_project[0][0] \n__________________________________________________________________________________________________\nblock_11_add (Add) (None, 14, 14, 96) 0 block_10_project_BN[0][0] \n block_11_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_12_expand (Conv2D) (None, 14, 14, 576) 55296 block_11_add[0][0] \n__________________________________________________________________________________________________\nblock_12_expand_BN (BatchNormal (None, 14, 14, 576) 2304 block_12_expand[0][0] \n__________________________________________________________________________________________________\nblock_12_expand_relu (ReLU) (None, 14, 14, 576) 0 block_12_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_12_depthwise (DepthwiseCo (None, 14, 14, 576) 5184 block_12_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_12_depthwise_BN (BatchNor (None, 14, 14, 576) 2304 block_12_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_12_depthwise_relu (ReLU) (None, 14, 14, 576) 0 block_12_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_12_project (Conv2D) (None, 14, 14, 96) 55296 block_12_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_12_project_BN (BatchNorma (None, 14, 14, 96) 384 block_12_project[0][0] 
\n__________________________________________________________________________________________________\nblock_12_add (Add) (None, 14, 14, 96) 0 block_11_add[0][0] \n block_12_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_13_expand (Conv2D) (None, 14, 14, 576) 55296 block_12_add[0][0] \n__________________________________________________________________________________________________\nblock_13_expand_BN (BatchNormal (None, 14, 14, 576) 2304 block_13_expand[0][0] \n__________________________________________________________________________________________________\nblock_13_expand_relu (ReLU) (None, 14, 14, 576) 0 block_13_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_13_pad (ZeroPadding2D) (None, 15, 15, 576) 0 block_13_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_13_depthwise (DepthwiseCo (None, 7, 7, 576) 5184 block_13_pad[0][0] \n__________________________________________________________________________________________________\nblock_13_depthwise_BN (BatchNor (None, 7, 7, 576) 2304 block_13_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_13_depthwise_relu (ReLU) (None, 7, 7, 576) 0 block_13_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_13_project (Conv2D) (None, 7, 7, 160) 92160 block_13_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_13_project_BN (BatchNorma (None, 7, 7, 160) 640 block_13_project[0][0] \n__________________________________________________________________________________________________\nblock_14_expand (Conv2D) (None, 7, 7, 960) 153600 block_13_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_14_expand_BN (BatchNormal (None, 7, 7, 960) 3840 block_14_expand[0][0] \n__________________________________________________________________________________________________\nblock_14_expand_relu (ReLU) (None, 7, 7, 960) 0 block_14_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_14_depthwise (DepthwiseCo (None, 7, 7, 960) 8640 block_14_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_14_depthwise_BN (BatchNor (None, 7, 7, 960) 3840 block_14_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_14_depthwise_relu (ReLU) (None, 7, 7, 960) 0 block_14_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_14_project (Conv2D) (None, 7, 7, 160) 153600 block_14_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_14_project_BN (BatchNorma (None, 7, 7, 160) 640 block_14_project[0][0] \n__________________________________________________________________________________________________\nblock_14_add (Add) (None, 7, 7, 160) 0 block_13_project_BN[0][0] \n block_14_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_15_expand 
(Conv2D) (None, 7, 7, 960) 153600 block_14_add[0][0] \n__________________________________________________________________________________________________\nblock_15_expand_BN (BatchNormal (None, 7, 7, 960) 3840 block_15_expand[0][0] \n__________________________________________________________________________________________________\nblock_15_expand_relu (ReLU) (None, 7, 7, 960) 0 block_15_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_15_depthwise (DepthwiseCo (None, 7, 7, 960) 8640 block_15_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_15_depthwise_BN (BatchNor (None, 7, 7, 960) 3840 block_15_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_15_depthwise_relu (ReLU) (None, 7, 7, 960) 0 block_15_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_15_project (Conv2D) (None, 7, 7, 160) 153600 block_15_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_15_project_BN (BatchNorma (None, 7, 7, 160) 640 block_15_project[0][0] \n__________________________________________________________________________________________________\nblock_15_add (Add) (None, 7, 7, 160) 0 block_14_add[0][0] \n block_15_project_BN[0][0] \n__________________________________________________________________________________________________\nblock_16_expand (Conv2D) (None, 7, 7, 960) 153600 block_15_add[0][0] \n__________________________________________________________________________________________________\nblock_16_expand_BN (BatchNormal (None, 7, 7, 960) 3840 block_16_expand[0][0] \n__________________________________________________________________________________________________\nblock_16_expand_relu (ReLU) (None, 7, 7, 960) 0 block_16_expand_BN[0][0] \n__________________________________________________________________________________________________\nblock_16_depthwise (DepthwiseCo (None, 7, 7, 960) 8640 block_16_expand_relu[0][0] \n__________________________________________________________________________________________________\nblock_16_depthwise_BN (BatchNor (None, 7, 7, 960) 3840 block_16_depthwise[0][0] \n__________________________________________________________________________________________________\nblock_16_depthwise_relu (ReLU) (None, 7, 7, 960) 0 block_16_depthwise_BN[0][0] \n__________________________________________________________________________________________________\nblock_16_project (Conv2D) (None, 7, 7, 320) 307200 block_16_depthwise_relu[0][0] \n__________________________________________________________________________________________________\nblock_16_project_BN (BatchNorma (None, 7, 7, 320) 1280 block_16_project[0][0] \n__________________________________________________________________________________________________\nConv_1 (Conv2D) (None, 7, 7, 1280) 409600 block_16_project_BN[0][0] \n__________________________________________________________________________________________________\nConv_1_bn (BatchNormalization) (None, 7, 7, 1280) 5120 Conv_1[0][0] \n__________________________________________________________________________________________________\nout_relu (ReLU) (None, 7, 7, 1280) 0 Conv_1_bn[0][0] 
\n__________________________________________________________________________________________________\naverage_pooling2d_2 (AveragePoo (None, 1, 1, 1280) 0 out_relu[0][0] \n__________________________________________________________________________________________________\nflatten (Flatten) (None, 1280) 0 average_pooling2d_2[0][0] \n__________________________________________________________________________________________________\ndense_4 (Dense) (None, 128) 163968 flatten[0][0] \n__________________________________________________________________________________________________\ndropout_2 (Dropout) (None, 128) 0 dense_4[0][0] \n__________________________________________________________________________________________________\ndense_5 (Dense) (None, 2) 258 dropout_2[0][0] \n==================================================================================================\nTotal params: 2,422,210\nTrainable params: 164,226\nNon-trainable params: 2,257,984\n__________________________________________________________________________________________________\n"
],
[
"INIT_LR = 1e-4\nEPOCHS = 20\nBATCH_SIZE = 32\nopt = Adam(lr=INIT_LR, decay=INIT_LR / EPOCHS)\nmy_model.compile(loss=\"binary_crossentropy\", optimizer=opt,\n metrics=[\"accuracy\"])",
"_____no_output_____"
],
[
"history = my_model.fit(\n aug.flow(train_images_array, train_labels_array, batch_size=BATCH_SIZE),\n steps_per_epoch=len(train_images_array) // BATCH_SIZE,\n validation_data=(test_images_array, test_labels_array),\n validation_steps=len(test_images_array)//BATCH_SIZE,\n epochs=EPOCHS)",
"Epoch 1/20\n36/36 [==============================] - 21s 582ms/step - loss: 0.7662 - accuracy: 0.5925 - val_loss: 0.2824 - val_accuracy: 0.9381\nEpoch 2/20\n36/36 [==============================] - 20s 545ms/step - loss: 0.5428 - accuracy: 0.7225 - val_loss: 0.2109 - val_accuracy: 0.9588\nEpoch 3/20\n36/36 [==============================] - 20s 545ms/step - loss: 0.5269 - accuracy: 0.7373 - val_loss: 0.1771 - val_accuracy: 0.9742\nEpoch 4/20\n36/36 [==============================] - 20s 546ms/step - loss: 0.5012 - accuracy: 0.7461 - val_loss: 0.1614 - val_accuracy: 0.9691\nEpoch 5/20\n36/36 [==============================] - 20s 551ms/step - loss: 0.4767 - accuracy: 0.7696 - val_loss: 0.1436 - val_accuracy: 0.9742\nEpoch 6/20\n36/36 [==============================] - 20s 553ms/step - loss: 0.4834 - accuracy: 0.7674 - val_loss: 0.1646 - val_accuracy: 0.9691\nEpoch 7/20\n36/36 [==============================] - 20s 552ms/step - loss: 0.4722 - accuracy: 0.7644 - val_loss: 0.1665 - val_accuracy: 0.9691\nEpoch 8/20\n36/36 [==============================] - 20s 554ms/step - loss: 0.4670 - accuracy: 0.7504 - val_loss: 0.1270 - val_accuracy: 0.9794\nEpoch 9/20\n36/36 [==============================] - 20s 552ms/step - loss: 0.4641 - accuracy: 0.7705 - val_loss: 0.1587 - val_accuracy: 0.9742\nEpoch 10/20\n36/36 [==============================] - 20s 551ms/step - loss: 0.4583 - accuracy: 0.7635 - val_loss: 0.1331 - val_accuracy: 0.9794\nEpoch 11/20\n36/36 [==============================] - 20s 547ms/step - loss: 0.4431 - accuracy: 0.7731 - val_loss: 0.1535 - val_accuracy: 0.9742\nEpoch 12/20\n36/36 [==============================] - 20s 554ms/step - loss: 0.4602 - accuracy: 0.7557 - val_loss: 0.1494 - val_accuracy: 0.9691\nEpoch 13/20\n36/36 [==============================] - 20s 550ms/step - loss: 0.4439 - accuracy: 0.7644 - val_loss: 0.1341 - val_accuracy: 0.9794\nEpoch 14/20\n36/36 [==============================] - 20s 553ms/step - loss: 0.4383 - accuracy: 0.7827 - val_loss: 0.1468 - val_accuracy: 0.9742\nEpoch 15/20\n36/36 [==============================] - 20s 551ms/step - loss: 0.4259 - accuracy: 0.7923 - val_loss: 0.1269 - val_accuracy: 0.9742\nEpoch 16/20\n36/36 [==============================] - 20s 556ms/step - loss: 0.4429 - accuracy: 0.7661 - val_loss: 0.1258 - val_accuracy: 0.9794\nEpoch 17/20\n36/36 [==============================] - 20s 555ms/step - loss: 0.4291 - accuracy: 0.7827 - val_loss: 0.1334 - val_accuracy: 0.9742\nEpoch 18/20\n36/36 [==============================] - 20s 555ms/step - loss: 0.4294 - accuracy: 0.7888 - val_loss: 0.1286 - val_accuracy: 0.9794\nEpoch 19/20\n36/36 [==============================] - 20s 552ms/step - loss: 0.4090 - accuracy: 0.8002 - val_loss: 0.1180 - val_accuracy: 0.9845\nEpoch 20/20\n36/36 [==============================] - 20s 555ms/step - loss: 0.4180 - accuracy: 0.7801 - val_loss: 0.1100 - val_accuracy: 0.9845\n"
],
[
"my_model.save(\"/drive/My Drive/FaceMask/model.h5\")",
"_____no_output_____"
]
],
[
[
"## EVALUATE MODEL",
"_____no_output_____"
]
],
[
[
"results = my_model.evaluate(test_images_array, test_labels_array, batch_size=128)",
"2/2 [==============================] - 0s 174ms/step - loss: 0.1100 - accuracy: 0.9845\n"
]
],
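Overall accuracy can hide class imbalance, so a per-class breakdown is a useful complement; this sketch reuses the `classification_report` already imported in the first cell (class index 0 is `with_mask`, as set up in `loadLabels`):

```python
# Per-class precision/recall on the held-out test set
pred_probs = my_model.predict(test_images_array, batch_size=32)
pred_idx = np.argmax(pred_probs, axis=1)
true_idx = np.argmax(test_labels_array, axis=1)
print(classification_report(true_idx, pred_idx,
                            target_names=["with_mask", "without_mask"]))
```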
[
[
"## PLOT RESULT",
"_____no_output_____"
]
],
[
[
"train_acc = history.history['accuracy']\nval_acc = history.history['val_accuracy']\n\ntrain_loss = history.history['loss']\nval_loss = history.history['val_loss']",
"_____no_output_____"
],
[
"plt.rcParams['figure.figsize'] = [10, 5]\n\n# Plot training & validation accuracy values\nplt.plot(train_acc)\nplt.plot(val_acc)\nplt.title('Model accuracy')\nplt.ylabel('Accuracy')\nplt.xlabel('Epoch')\nplt.savefig('/drive/My Drive/FaceMask/accuracy.png')\nplt.legend(['Train', 'Test'])",
"_____no_output_____"
],
[
"# Plot training & validation loss values\nplt.plot(train_loss)\nplt.plot(val_loss)\nplt.title('Model loss')\nplt.ylabel('Loss')\nplt.xlabel('Epoch')\nplt.savefig('/drive/My Drive/FaceMask/lost.png')\nplt.legend(['Train', 'Test'])",
"_____no_output_____"
]
],
[
[
"## PREDICT",
"_____no_output_____"
]
],
[
[
"my_model = load_model('D:\\PROGRAMING\\Python\\Face Mask/model.h5')",
"_____no_output_____"
],
[
"img = image.load_img('D:\\PROGRAMING\\Python\\Face Mask/1.jpg',target_size=(224,224))\nplt.imshow(img)\nplt.axis('off')\nplt.show()",
"_____no_output_____"
],
[
"img = np.array(img, dtype='float')\nimg = img.reshape(1, 224, 224, 3)\nprediksi = my_model.predict(img)\nidx = np.argmax(prediksi)\npercentage = \"%.2f\" % (prediksi[0][idx] * 100)\nprint(str(percentage)+\" %\")\nif (idx):\n print(\"Wearing Masker\")\nelse:\n print(\"Not Wearing Masker\")",
"64.68 %\nWearing Masker\n"
]
]
] |
[
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
]
] |
4a62b681e0a7625745feac86dbce495c6fdd5113
| 4,021 |
ipynb
|
Jupyter Notebook
|
Modulo4/Quiz9.ipynb
|
Juanenriquea/porinvv2020
|
48c16439d35d30a7c0728330e66bc35669d92826
|
[
"MIT"
] | null | null | null |
Modulo4/Quiz9.ipynb
|
Juanenriquea/porinvv2020
|
48c16439d35d30a7c0728330e66bc35669d92826
|
[
"MIT"
] | null | null | null |
Modulo4/Quiz9.ipynb
|
Juanenriquea/porinvv2020
|
48c16439d35d30a7c0728330e66bc35669d92826
|
[
"MIT"
] | null | null | null | 20.105 | 79 | 0.388709 |
[
[
[
"import pandas as pd",
"_____no_output_____"
],
[
"# betaA = covAM / varM = corrAM * volA / volM\ntabla = pd.DataFrame({'p': [0.1, 0.8, 0.10], 'precios': [40, 55, 60]})\ntabla['r'] = (tabla['precios'] - 50) / 50\ntabla",
"_____no_output_____"
],
[
"ErA = (tabla['p'] * tabla['r']).sum()\nsA = (tabla['p'] * (tabla['r'] - ErA)**2).sum()**0.5\nsA",
"_____no_output_____"
],
[
"betaA = 0.8 * sA / 0.1\nbetaA",
"_____no_output_____"
],
[
"sB = 0.12\nbetaB = 0.5 * sB / 0.1\nbetaB",
"_____no_output_____"
],
[
"0.5 * betaA + 0.5 * betaB",
"_____no_output_____"
]
]
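The cells above compute beta from the correlation-and-volatility form; a short sketch collecting the steps into one helper (using the same 50/50 portfolio weights) makes the relationship beta_i = corr(i, M) * vol_i / vol_M = cov(i, M) / var(M) explicit:

```python
def beta(corr_im, vol_i, vol_m):
    # beta_i = corr(i, M) * vol_i / vol_M, equivalent to cov(i, M) / var(M)
    return corr_im * vol_i / vol_m

betaA = beta(0.8, sA, 0.1)
betaB = beta(0.5, 0.12, 0.1)
portfolio_beta = 0.5 * betaA + 0.5 * betaB  # portfolio beta is the weighted average
print(betaA, betaB, portfolio_beta)
```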
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a62b82577abf83b6e713958d34389c90d0ed224
| 4,963 |
ipynb
|
Jupyter Notebook
|
notebooks/plot_test_case.ipynb
|
valohai/bayesmark
|
7b4ea7705ebe01e9fa34992a98b1b9cd688052e5
|
[
"Apache-2.0"
] | 102 |
2019-09-27T02:38:52.000Z
|
2022-03-12T13:31:11.000Z
|
notebooks/plot_test_case.ipynb
|
daxiongshu/bayesmark
|
b1e3d2f4830adbddc2ec32bc51e4ef3198a13566
|
[
"Apache-2.0"
] | 17 |
2019-10-07T18:20:21.000Z
|
2022-01-03T08:19:16.000Z
|
notebooks/plot_test_case.ipynb
|
daxiongshu/bayesmark
|
b1e3d2f4830adbddc2ec32bc51e4ef3198a13566
|
[
"Apache-2.0"
] | 34 |
2019-09-27T02:38:31.000Z
|
2022-02-09T21:32:25.000Z
| 30.826087 | 121 | 0.541608 |
[
[
[
"import matplotlib.pyplot as plt\nfrom matplotlib import cm, colors, rcParams\n\nimport numpy as np\n\nimport bayesmark.constants as cc\nfrom bayesmark.path_util import abspath\nfrom bayesmark.serialize import XRSerializer\nfrom bayesmark.constants import ITER, METHOD, TEST_CASE, OBJECTIVE, VISIBLE_TO_OPT",
"_____no_output_____"
],
[
"# User settings, must specify location of the data to make plots here for this to run\nDB_ROOT = abspath(\".\")\nDBID = \"bo_example_folder\"\nmetric_for_scoring = VISIBLE_TO_OPT",
"_____no_output_____"
],
[
"# Matplotlib setup\n# Note this will put type-3 font BS in the pdfs, if it matters\nrcParams[\"mathtext.fontset\"] = \"stix\"\nrcParams[\"font.family\"] = \"STIXGeneral\"",
"_____no_output_____"
],
[
"def build_color_dict(names):\n \"\"\"Make a color dictionary to give each name a mpl color.\n \"\"\"\n norm = colors.Normalize(vmin=0, vmax=1)\n m = cm.ScalarMappable(norm, cm.tab20)\n color_dict = m.to_rgba(np.linspace(0, 1, len(names)))\n color_dict = dict(zip(names, color_dict))\n return color_dict",
"_____no_output_____"
],
[
"# Load the data\nagg_results_ds, meta = XRSerializer.load_derived(DB_ROOT, db=DBID, key=cc.PERF_RESULTS)",
"_____no_output_____"
],
[
"# Setup for plotting\nmethod_list = agg_results_ds.coords[METHOD].values\nmethod_to_rgba = build_color_dict(method_list.tolist())",
"_____no_output_____"
],
[
"# Make the plots for inidividual test functions\nfor func_name in agg_results_ds.coords[TEST_CASE].values:\n plt.figure(figsize=(5, 5), dpi=300)\n for method_name in method_list:\n curr_ds = agg_results_ds.sel({TEST_CASE: func_name, METHOD: method_name, OBJECTIVE: metric_for_scoring})\n\n plt.fill_between(\n curr_ds.coords[ITER].values,\n curr_ds[cc.LB_MED].values,\n curr_ds[cc.UB_MED].values,\n color=method_to_rgba[method_name],\n alpha=0.5,\n )\n plt.plot(\n curr_ds.coords[ITER].values,\n curr_ds[cc.PERF_MED].values,\n color=method_to_rgba[method_name],\n label=method_name,\n marker=\".\",\n )\n plt.xlabel(\"evaluation\", fontsize=10)\n plt.ylabel(\"median score\", fontsize=10)\n plt.title(func_name)\n plt.legend(fontsize=8, bbox_to_anchor=(1.05, 1), loc=\"upper left\", borderaxespad=0.0)\n plt.grid()\n\n plt.figure(figsize=(5, 5), dpi=300)\n for method_name in method_list:\n curr_ds = agg_results_ds.sel({TEST_CASE: func_name, METHOD: method_name, OBJECTIVE: metric_for_scoring})\n\n plt.fill_between(\n curr_ds.coords[ITER].values,\n curr_ds[cc.LB_MEAN].values,\n curr_ds[cc.UB_MEAN].values,\n color=method_to_rgba[method_name],\n alpha=0.5,\n )\n plt.plot(\n curr_ds.coords[ITER].values,\n curr_ds[cc.PERF_MEAN].values,\n color=method_to_rgba[method_name],\n label=method_name,\n marker=\".\",\n )\n plt.xlabel(\"evaluation\", fontsize=10)\n plt.ylabel(\"mean score\", fontsize=10)\n plt.title(func_name)\n plt.legend(fontsize=8, bbox_to_anchor=(1.05, 1), loc=\"upper left\", borderaxespad=0.0)\n plt.grid()",
"_____no_output_____"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a62bdcbdabe75b985e8239efa32a6aaf61510d3
| 90,883 |
ipynb
|
Jupyter Notebook
|
celdas/Zelda-04.ipynb
|
AlfaroMiguel/zelda-deep-q-learning
|
4e1f223340347d9823143a157e5bb37bbac626f4
|
[
"Apache-2.0"
] | null | null | null |
celdas/Zelda-04.ipynb
|
AlfaroMiguel/zelda-deep-q-learning
|
4e1f223340347d9823143a157e5bb37bbac626f4
|
[
"Apache-2.0"
] | null | null | null |
celdas/Zelda-04.ipynb
|
AlfaroMiguel/zelda-deep-q-learning
|
4e1f223340347d9823143a157e5bb37bbac626f4
|
[
"Apache-2.0"
] | 1 |
2021-02-11T22:15:41.000Z
|
2021-02-11T22:15:41.000Z
| 147.298217 | 35,524 | 0.859776 |
[
[
[
"import sys \n\n# sys.path.append('GVGAI_GYM')\n\nimport gym\nimport gym_gvgai\nimport numpy as np\nimport random\nfrom IPython.display import clear_output\nfrom collections import deque\nimport progressbar\n\n\nimport tensorflow as tf\n\nfrom tensorflow import keras\nfrom tensorflow.keras import Model, Sequential\nfrom tensorflow.keras.layers import Dense, Embedding, Reshape, Input, Flatten\nfrom tensorflow.keras.optimizers import Adam\n\nfrom datetime import datetime\nfrom packaging import version\n\n# Constants\n\nIMG_H = 9\nIMG_W = 13\nSTATE_SIZE = (IMG_H, IMG_W)\n\n# Tensorboard extension\n\n# %load_ext tensorboard\n# logdir = \"logs/scalars/zelda-04\"\n# tensorboard_callback = keras.callbacks.TensorBoard(log_dir=logdir)\n# print(\"TensorFlow version: \", tf.__version__)\n# assert version.parse(tf.__version__).release[0] >= 2, \\\n# \"This notebook requires TensorFlow 2.0 or above.\"\n\nclass Agent:\n def __init__(self, enviroment):\n \n # Initialize atributes\n self._state_size = STATE_SIZE\n #self._state_size = enviroment.observation_space.n\n self._action_size = enviroment.action_space.n\n \n self.expirience_replay = deque(maxlen=2000)\n \n # Initialize discount and exploration rate\n self.gamma = 0.6\n self.epsilon = 1.0\n self.dec_epsilon_rate = 0.0001\n self.min_epsilon = 0.1\n \n # Build networks\n self.q_network = self._build_compile_model()\n self.target_network = self._build_compile_model()\n self.align_target_model()\n\n def store(self, state, action, reward, next_state, terminated):\n self.expirience_replay.append((state, action, reward, next_state, terminated))\n \n def _build_compile_model(self):\n model = Sequential()\n model.add(Flatten(input_shape=STATE_SIZE))\n model.add(Dense(30, activation='relu'))\n model.add(Dense(20, activation='relu')) \n model.add(Dense(self._action_size, activation='linear'))\n model.compile(loss='mse', optimizer=Adam(learning_rate=0.01))\n return model\n\n def align_target_model(self):\n self.target_network.set_weights(self.q_network.get_weights())\n \n def act(self, state, use_epsilon_strategy=True):\n if np.random.rand() <= self.epsilon and use_epsilon_strategy:\n self.epsilon = self.epsilon - self.dec_epsilon_rate if self.epsilon > self.min_epsilon else self.min_epsilon\n return enviroment.action_space.sample()\n \n if not use_epsilon_strategy and np.random.rand() <= self.min_epsilon:\n return enviroment.action_space.sample()\n \n tensor_state = tf.convert_to_tensor([state])\n \n q_values = self.q_network.predict(tensor_state)\n return np.argmax(q_values[0])\n\n def retrain(self, batch_size):\n minibatch = random.sample(self.expirience_replay, batch_size)\n \n for state, action, reward, next_state, terminated in minibatch:\n \n tensor_state = tf.convert_to_tensor([state])\n tensor_next_state = tf.convert_to_tensor([next_state])\n \n q_values = self.q_network.predict(tensor_state)\n \n if terminated:\n q_values[0][action] = reward\n else:\n t = self.target_network.predict(tensor_next_state)\n q_values[0][action] = reward + self.gamma * np.amax(t)\n \n self.q_network.fit(tensor_state, q_values, epochs=1, verbose=0)\n \n def save_model(self, model_name=\"models/zelda_03_q_network\"):\n self.q_network.save(model_name)\n \n def load_model(self, model_path):\n self.q_network = keras.models.load_model(model_path)\n self.align_target_model()",
"_____no_output_____"
],
[
"# Utils\n\nimport matplotlib.pyplot as plt\nfrom IPython import display\n\ndef show_state(env, step=0, name=\"\", info=\"\"):\n plt.figure(3)\n plt.clf()\n plt.imshow(env.render(mode='rgb_array'))\n plt.title(\"{} | Step: {} {}\".format(name, step, info))\n plt.axis(\"off\")\n\n display.clear_output(wait=True)\n display.display(plt.gcf())\n \ndef grayToArray(array):\n result = np.zeros((IMG_H, IMG_W))\n for i in range(int(array.shape[0]/10)):\n for j in range(int(array.shape[1]/10)):\n result[i][j] = int(array[10*i+5, 10*j+5])\n return result\n\n\ndef grayConversion(image):\n b = image[..., 0]\n g = image[..., 1]\n r = image[..., 2]\n return 0.21 * r + 0.72 * g + 0.07 * b\n\ndef reshape_state(state):\n return grayToArray(grayConversion(state))",
"_____no_output_____"
],
[
"# Train\n\nbatch_size = 500\nnum_of_episodes = 10\ntimesteps_per_episode = 1000\n\nenviroment = gym.make(\"gvgai-zelda-lvl0-v0\")\nenviroment.reset()\nagent = Agent(enviroment)\n# agent.load_model(\"models/zelda_04_q_network\")\nagent.q_network.summary()",
"Connecting to host 127.0.0.1 at port 55376 ...\nClient connected to server [OK]\nModel: \"sequential_6\"\n_________________________________________________________________\nLayer (type) Output Shape Param # \n=================================================================\nflatten_6 (Flatten) (None, 117) 0 \n_________________________________________________________________\ndense_18 (Dense) (None, 30) 3540 \n_________________________________________________________________\ndense_19 (Dense) (None, 20) 620 \n_________________________________________________________________\ndense_20 (Dense) (None, 6) 126 \n=================================================================\nTotal params: 4,286\nTrainable params: 4,286\nNon-trainable params: 0\n_________________________________________________________________\n"
],
[
"PLAYER_TILE_VALUE = 201\nSECONDARY_PLAYER_TILE_VALUE = 38\nKEY_TILE_VALUE = 151\nDOOR_TILE_VALUE = 57\n\nACTIONS = ['ACTION_NIL', 'ACTION_USE', 'ACTION_LEFT',\n 'ACTION_RIGHT', 'ACTION_DOWN', 'ACTION_UP']\n\ndef find_key(state: list):\n for row in state:\n for column in row:\n if column == KEY_TILE_VALUE:\n return True\n return False\n\ndef find_position(state: list, previous_position):\n player_row_index = None\n player_col_index = None\n\n if previous_position is None or previous_position[0] is None:\n for row_index, row in enumerate(state):\n for col_index, value in enumerate(row):\n if value == PLAYER_TILE_VALUE or value == SECONDARY_PLAYER_TILE_VALUE:\n player_row_index = row_index\n player_col_index = col_index\n break\n if not (player_row_index is None):\n break\n return player_row_index, player_col_index\n\n for row_offset in range(-2, 2, 1):\n for col_offset in range(-2, 2, 1):\n row_index = previous_position[0] + row_offset\n col_index = previous_position[1] + col_offset\n value = int(state[row_index][col_index])\n\n # print('row_index, col_index, value', row_index, col_index, value)\n if value == PLAYER_TILE_VALUE or value == SECONDARY_PLAYER_TILE_VALUE:\n player_row_index = row_index\n player_col_index = col_index\n break\n if not (player_row_index is None):\n break\n\n return player_row_index, player_col_index\n\ndef process_reward(state, next_state, action_id, raw_reward, is_over, info, position, next_position):\n action = ACTIONS[action_id]\n\n was_key_present = find_key(state)\n is_key_present = find_key(next_state)\n\n grabbed_the_key = was_key_present and not is_key_present\n\n is_winner = info[\"winner\"] == \"PLAYER_WINS\"\n\n if is_over and is_winner:\n return 1000\n\n if is_over and not is_winner:\n return -1000\n \n if grabbed_the_key:\n return 500\n\n if raw_reward > 0:\n return raw_reward * 10\n\n if action in ['ACTION_NIL']:\n return -500\n\n if action in ['ACTION_USE']:\n return -100\n\n has_moved = next_position[0] != position[0] or next_position[1] != position[1]\n\n if not has_moved:\n return -20\n\n return -1\n\nfor e in range(0, num_of_episodes):\n # Reset the enviroment\n state = enviroment.reset()\n state = reshape_state(state)\n position = find_position(state, None)\n \n # Initialize variables\n reward = 0\n terminated = False\n \n bar = progressbar.ProgressBar(maxval=timesteps_per_episode/10, widgets=[progressbar.Bar('=', '[', ']'), ' ', progressbar.Percentage()])\n bar.start()\n \n for timestep in range(timesteps_per_episode):\n # Run Action\n action = agent.act(state)\n \n # Take action \n next_state, reward, terminated, info = enviroment.step(action) \n next_state = reshape_state(next_state)\n next_position = find_position(next_state, position)\n reward = process_reward(state, next_state, action, reward, terminated, info, position, next_position)\n agent.store(state, action, reward, next_state, terminated)\n \n state = next_state\n position = next_position\n \n if terminated:\n agent.align_target_model()\n break\n \n if len(agent.expirience_replay) > batch_size:\n agent.retrain(batch_size)\n \n if timestep%10 == 0:\n agent.align_target_model()\n bar.update(timestep/10 + 1)\n \n bar.finish()\n if (e + 1) % 10 == 0:\n print(\"**********************************\")\n print(\"Episode: {}\".format(e + 1))\n print(\"**********************************\")\n agent.save_model(\"models/zelda_04_q_network_{}\".format(e))",
"[========================================================================] 100%\n"
],
[
"# Play with agent\nenviroment = gym.make(\"gvgai-zelda-lvl0-v0\")\nactions_list = ['ACTION_NIL', 'ACTION_USE', 'ACTION_LEFT',\n 'ACTION_RIGHT', 'ACTION_DOWN', 'ACTION_UP']\nagent = Agent(enviroment)\n\nmodel_path = \"models/zelda_04_q_network_7\"\ntimesteps = 2000\n\nagent.load_model(model_path)\nstate = reshape_state(enviroment.reset())\n\nfor timestep in range(timesteps):\n action = agent.act(state, False)\n \n next_state, reward, terminated, info = enviroment.step(action)\n state = reshape_state(next_state)\n \n print(action, reward, terminated, info)\n show_state(enviroment, timestep, \"Zelda\", \"Action: {} Player Status: {} Terminated: {}\".format(actions_list[action], info['winner'], terminated))\n\n if terminated:\n print(\"terminated\")\n break",
"_____no_output_____"
],
[
"# !ls\n# !tar -czvf models.tar.gz models\n# from google.colab import files\n# files.download(\"models.tar.gz\")\n",
"_____no_output_____"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
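A minimal sketch of a batched version of the Bellman update used in the Zelda DQN record above: the record's retrain() calls predict/fit once per transition, whereas the same target can be built for the whole minibatch at once. The q_network/target_network interface is assumed to match the Keras models defined in that record; everything else (shapes, names) is illustrative, not taken from the source.

import random
import numpy as np

def retrain_batched(q_network, target_network, replay, batch_size, gamma=0.6):
    # Sample a minibatch of (state, action, reward, next_state, terminated) tuples.
    batch = random.sample(replay, batch_size)
    states = np.stack([b[0] for b in batch]).astype("float32")
    actions = np.array([b[1] for b in batch])
    rewards = np.array([b[2] for b in batch], dtype="float32")
    next_states = np.stack([b[3] for b in batch]).astype("float32")
    terminated = np.array([b[4] for b in batch], dtype=bool)

    q_values = q_network.predict(states, verbose=0)           # shape (batch, n_actions)
    next_q = target_network.predict(next_states, verbose=0)   # shape (batch, n_actions)

    # Bellman target: r for terminal transitions, r + gamma * max_a' Q_target otherwise.
    targets = rewards + gamma * next_q.max(axis=1) * (~terminated)
    q_values[np.arange(batch_size), actions] = targets

    q_network.fit(states, q_values, epochs=1, verbose=0)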
4a62be3c4f5456c2ebbac1da1a1f481f7066e3d2
| 393,974 |
ipynb
|
Jupyter Notebook
|
Forecasting-Methods/3. Autoregressive models/Autoregressive models.ipynb
|
cmlimm/uni-projects
|
b63ac71cc0b971c7f035096a6bd15b0cbb5bb9f6
|
[
"MIT"
] | null | null | null |
Forecasting-Methods/3. Autoregressive models/Autoregressive models.ipynb
|
cmlimm/uni-projects
|
b63ac71cc0b971c7f035096a6bd15b0cbb5bb9f6
|
[
"MIT"
] | null | null | null |
Forecasting-Methods/3. Autoregressive models/Autoregressive models.ipynb
|
cmlimm/uni-projects
|
b63ac71cc0b971c7f035096a6bd15b0cbb5bb9f6
|
[
"MIT"
] | 1 |
2020-10-29T18:31:32.000Z
|
2020-10-29T18:31:32.000Z
| 639.568182 | 76,244 | 0.948525 |
[
[
[
"import numpy as np\nimport pandas as pd\n\nimport matplotlib.pyplot as plt\n%matplotlib inline",
"_____no_output_____"
],
[
"plt.rcParams[\"figure.figsize\"] = (12,5)",
"_____no_output_____"
],
[
"lynx = pd.read_csv('lynx.csv', index_col=0, parse_dates=True)\nlynx.columns = ['lynxes']",
"_____no_output_____"
],
[
"lynx",
"_____no_output_____"
],
[
"lynx.plot()",
"_____no_output_____"
],
[
"def ar2(series, phi1, phi2, alpha):\n n = len(series)\n values = series.values\n matrix = np.concatenate((values[1:-1], values[:-2], np.ones((n - 2, 1))), axis=1)\n \n prediction = np.dot(matrix, (phi1, phi2, alpha))\n return prediction",
"_____no_output_____"
],
[
"plt.plot(ar2(lynx, 0.5, 0.5, 0.5))\nplt.plot(lynx['lynxes'].values[2:])",
"_____no_output_____"
],
[
"from scipy.optimize import minimize",
"_____no_output_____"
],
[
"def MSE(actual, prediction):\n return np.mean((actual - prediction)**2)",
"_____no_output_____"
],
[
"minf = lambda param: MSE(lynx['lynxes'].values[2:], ar2(lynx, param[0], param[1], param[2]))",
"_____no_output_____"
],
[
"result = minimize(minf, (0.5, 0.5, 0.5))\nphi1, phi2, alpha = result.x\nresult",
"_____no_output_____"
],
[
"plt.plot(ar2(lynx, phi1, phi2, alpha))\nplt.plot(lynx['lynxes'].values[2:])",
"_____no_output_____"
],
[
"lynx['lynxes'].values",
"_____no_output_____"
],
[
"residuals = lynx['lynxes'].values[2:] - ar2(lynx, phi1, phi2, alpha)",
"_____no_output_____"
],
[
"def ma2(residuals, theta1, theta2, beta):\n n = len(residuals)\n matrix = np.concatenate(([residuals[1:-1]], [residuals[:-2]], [np.ones(n - 2)]), axis=0)\n matrix = np.transpose(matrix)\n \n prediction = np.dot(matrix, (theta1, theta2, beta))\n return prediction",
"_____no_output_____"
],
[
"plt.plot(residuals[2:])\nplt.plot(ma2(residuals, 0.5, 0.5, 0.5))",
"_____no_output_____"
],
[
"mine = lambda param: MSE(ma2(residuals, param[0], param[1], param[2]), residuals[2:])",
"_____no_output_____"
],
[
"result = minimize(mine, (0.5, 0.5, 0.5))\ntheta1, theta2, beta = result.x\nresult",
"_____no_output_____"
],
[
"plt.plot(residuals[2:])\nplt.plot(ma2(residuals, theta1, theta2, beta))",
"_____no_output_____"
],
[
"def arma22(series, phi1, phi2, alpha, theta1, theta2, beta):\n ar = ar2(series, phi1, phi2, alpha)\n residuals = np.concatenate(series.values[2:]) - ar\n ma = ma2(residuals, theta1, theta2, beta)\n \n arma = ar[2:] + ma\n return arma",
"_____no_output_____"
],
[
"plt.plot(lynx['lynxes'].values[4:])\nplt.plot(ar2(lynx, phi1, phi2, alpha)[2:])\nplt.plot(arma22(lynx, phi1, phi2, alpha, theta1, theta2, beta))",
"_____no_output_____"
],
[
"MSE(lynx['lynxes'].values[4:], ar2(lynx, phi1, phi2, alpha)[2:])",
"_____no_output_____"
],
[
"MSE(lynx['lynxes'].values[4:], arma22(lynx, phi1, phi2, alpha, theta1, theta2, beta))",
"_____no_output_____"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
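The autoregressive record above fits its AR(2) and MA(2) coefficients by numerically minimising the MSE with scipy.optimize.minimize. Because the AR(2) model is linear in its parameters, the same coefficients can also be recovered in closed form with ordinary least squares; a small sketch, assuming `values` is a 1-D numpy array (e.g. lynx['lynxes'].values flattened):

import numpy as np

def fit_ar2(values):
    # Design matrix for y_t = phi1*y_{t-1} + phi2*y_{t-2} + alpha.
    y = values[2:]
    X = np.column_stack([values[1:-1], values[:-2], np.ones(len(values) - 2)])
    coef, *_ = np.linalg.lstsq(X, y, rcond=None)
    phi1, phi2, alpha = coef
    return phi1, phi2, alpha, X @ coef  # fitted parameters and in-sample prediction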
4a62be9089e61bf493606852bcd68b6ea35b36c4
| 40,297 |
ipynb
|
Jupyter Notebook
|
Ultra96/notebooks/common/ultra96_pmbus.ipynb
|
schelleg/Ultra96-PYNQ
|
f280624b15947dbe6cf68a1a88eff289c5e298a7
|
[
"Apache-2.0"
] | 138 |
2018-09-26T03:33:18.000Z
|
2022-03-28T19:55:17.000Z
|
Ultra96/notebooks/common/ultra96_pmbus.ipynb
|
schelleg/Ultra96-PYNQ
|
f280624b15947dbe6cf68a1a88eff289c5e298a7
|
[
"Apache-2.0"
] | 44 |
2018-09-20T13:48:44.000Z
|
2021-12-26T08:37:40.000Z
|
Ultra96/notebooks/common/ultra96_pmbus.ipynb
|
schelleg/Ultra96-PYNQ
|
f280624b15947dbe6cf68a1a88eff289c5e298a7
|
[
"Apache-2.0"
] | 77 |
2018-09-17T21:44:58.000Z
|
2022-03-28T19:55:19.000Z
| 85.194503 | 12,844 | 0.774077 |
[
[
[
"# PMBus on the Ultra96\n\nThe Ultra96 has some support for monitoring power rails on the board using PMBus. PYNQ exposes these rails through the get_rails function that returns a dictionary of all of the rails available to be monitored.",
"_____no_output_____"
]
],
[
[
"import pynq\n\nrails = pynq.get_rails()\nrails",
"_____no_output_____"
]
],
[
[
"Depending on whether you are using Ultra96 v1 or v2, you may see a different set of rails.\n\nAs can be seen, the keys of the dictionary are the names of the voltage rails while the values are `Rail` objects which contain three sensors for the voltage, current and power.\n\nTo see how power changes under CPU load we can use the `DataRecorder` class. \nFor this example we are going to look at a specific power rail (depending on v1 or v2)\nas we load one of the CPU cores in Python.",
"_____no_output_____"
]
],
[
[
"if 'VSYS' in rails.keys():\n print(\"Recording Ultra96 v1 power...\")\n rail_name = 'VSYS'\nelif 'PSINT_FP' in rails.keys():\n print(\"Recording Ultra96 v2 power...\")\n rail_name = 'PSINT_FP'\nelse:\n raise RuntimeError(\"Cannot determine Ultra96 board version.\")\nrecorder = pynq.DataRecorder(rails[rail_name].power)",
"Recording Ultra96 v2 power...\n"
]
],
[
[
"We can now use the recorder to monitor the applied sensor. For this example we'll sample the power every half second while sleeping and performing a dummy loop.",
"_____no_output_____"
]
],
[
[
"import time\nwith recorder.record(0.5):\n time.sleep(5)\n for _ in range(10000000):\n pass\n time.sleep(5)",
"_____no_output_____"
]
],
[
[
"The `DataRecorder` exposes the sensor data as a pandas dataframe.",
"_____no_output_____"
]
],
[
[
"recorder.frame",
"_____no_output_____"
]
],
[
[
"Or by plotting the results using matplotlib.",
"_____no_output_____"
]
],
[
[
"%matplotlib inline\nrecorder.frame['{}_power'.format(rail_name)].plot()",
"_____no_output_____"
]
],
[
[
"We can get more information by using the `mark` function which will increment the invocation number without having to stop and start the recorder.",
"_____no_output_____"
]
],
[
[
"recorder.reset()\nwith recorder.record(0.5):\n time.sleep(5)\n recorder.mark()\n for _ in range(10000000):\n pass\n recorder.mark()\n time.sleep(5)\n \nrecorder.frame.plot(subplots=True)",
"_____no_output_____"
]
],
[
[
"This clearly shows the power spike when the for loop starts running.",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
]
] |
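As a follow-up to the Ultra96 record above: the recorded frame is an ordinary pandas DataFrame, so summary statistics come straight from pandas. A sketch assuming the recorder, rail_name and 0.5 s sample interval used in that record; the helper name and the rough energy estimate are illustrative assumptions.

import pandas as pd

def summarise_power(frame: pd.DataFrame, rail_name: str, sample_interval_s: float = 0.5):
    col = '{}_power'.format(rail_name)                       # column naming as used in the record
    stats = frame[col].agg(['mean', 'max', 'min'])           # W
    energy_joules = frame[col].sum() * sample_interval_s     # rough energy estimate over the run
    return stats, energy_joules

# Usage (after the recording cell above): stats, energy = summarise_power(recorder.frame, rail_name)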
4a62c40c08b252041fa9c835ad450838d1ecb47f
| 2,144 |
ipynb
|
Jupyter Notebook
|
test_install.ipynb
|
TheDeathStreaker/amld2019-graph-workshop
|
bb271470d23a8711d20dbdf956691ed79d3df61a
|
[
"MIT"
] | 31 |
2019-01-24T16:24:19.000Z
|
2020-08-24T18:27:45.000Z
|
test_install.ipynb
|
TheDeathStreaker/amld2019-graph-workshop
|
bb271470d23a8711d20dbdf956691ed79d3df61a
|
[
"MIT"
] | 2 |
2019-01-26T15:53:13.000Z
|
2019-01-28T11:37:59.000Z
|
test_install.ipynb
|
TheDeathStreaker/amld2019-graph-workshop
|
bb271470d23a8711d20dbdf956691ed79d3df61a
|
[
"MIT"
] | 18 |
2019-01-25T18:38:59.000Z
|
2021-08-07T01:45:46.000Z
| 22.568421 | 164 | 0.548507 |
[
[
[
"# [AMLD'19 Learning and Processing over Networks](https://github.com/rodrigo-pena/amld2019-graph-workshop)\n\n# Installation\n\nFollow the [installation instructions] then execute this notebook to make sure that the main packages we'll need for the workshop can at least be imported.\n\n[installation instructions]: https://github.com/rodrigo-pena/amld2019-graph-workshop#installation",
"_____no_output_____"
]
],
[
[
"import numpy as np\nimport scipy\nimport sklearn\nfrom matplotlib import pyplot as plt\nimport pandas\nimport networkx as nx\nimport osmnx\nimport cartopy\nimport pygsp",
"_____no_output_____"
],
[
"W = scipy.sparse.rand(10, 10, 0.2)\nW = W.toarray()\nW = W - np.diag(np.diag(W))\nW = W + np.transpose(W)\nG = pygsp.graphs.Graph(W)\nprint(G.d)",
"_____no_output_____"
],
[
"G = pygsp.graphs.Logo()\nG.estimate_lmax()\ng = pygsp.filters.Heat(G, scale=100)\nDELTAS = [20, 30, 1090]\ns = np.zeros(G.N)\ns[DELTAS] = 1\ns = g.filter(s)\nfig, ax = G.plot_signal(s, highlight=DELTAS)",
"_____no_output_____"
]
]
] |
[
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code",
"code",
"code"
]
] |
4a62c57521de6e0e4ce3bb8119a2873c555e03c9
| 4,055 |
ipynb
|
Jupyter Notebook
|
chapter2/homework/computer/3-29/201611680805.ipynb
|
hpishacker/python_tutorial
|
9005f0db9dae10bdc1d1c3e9e5cf2268036cd5bd
|
[
"MIT"
] | 76 |
2017-09-26T01:07:26.000Z
|
2021-02-23T03:06:25.000Z
|
chapter2/homework/computer/3-29/201611680805.ipynb
|
hpishacker/python_tutorial
|
9005f0db9dae10bdc1d1c3e9e5cf2268036cd5bd
|
[
"MIT"
] | 5 |
2017-12-10T08:40:11.000Z
|
2020-01-10T03:39:21.000Z
|
chapter2/homework/computer/3-29/201611680805.ipynb
|
hacker-14/python_tutorial
|
4a110b12aaab1313ded253f5207ff263d85e1b56
|
[
"MIT"
] | 112 |
2017-09-26T01:07:30.000Z
|
2021-11-25T19:46:51.000Z
| 21.119792 | 60 | 0.420222 |
[
[
[
"n = int(input('请输入第1个整数,以回车结束。'))\ni = 0\ntotal_n = 1\nwhile i < n:\n i = i + 1\n total_n = total_n * i\n\nm = int(input('请输入第2个整数,以回车结束。'))\ni = 0\ntotal_m = 1\nwhile i < m:\n i = i + 1\n total_m = total_m * i\n\nk = int(input('请输入第3个整数,以回车结束。'))\ni = 0\ntotal_k = 1\nwhile i < k:\n i = i + 1\n total_k = total_k *i\n\nprint('m!+n!+k!:', total_n + total_m + total_k)\n",
"请输入第1个整数,以回车结束。5\n请输入第2个整数,以回车结束。4\n请输入第3个整数,以回车结束。3\nm!+n!+k!: 150\n"
],
[
"def compute_sum(end):\n i=1\n m=1\n total_n=1\n while i<end:\n m+=2\n total_n+=1/m*pow(-1,i)\n i+=1\n return total_n\n\na=1000\nb=100000\nprint('最终的和是:', 4*compute_sum(a), 4*compute_sum(b))\n ",
"最终的和是: 3.140592653839794 3.1415826535897198\n"
],
[
"def birthday(date):\n if 1.19<date<2.19:\n return('你是水瓶座')\n elif 2.18<date<3.21:\n return('你是双鱼座')\n elif 3.20<date<4.20:\n return('你是白羊座')\n elif 4.19<date<5.21:\n return('你是金牛座')\n elif 5.20<date<6.22:\n return('你是双子座')\n elif 6.21<date<7.23:\n return('你是巨蟹座')\n elif 7.22<date<8.23:\n return('你是狮子座')\n elif 3.22<date<9.23:\n return('你是处女座')\n elif 9.22<date<10.24:\n return('你是天枰座')\n elif 10.23<date<11.23:\n return('你是天蝎座')\n elif 11.22<date<12.22:\n return('你是射手座')\n else: \n return('你是摩羯座')\n \ndate=float(input('请输入你的生日'))\nprint(birthday(date))",
"请输入你的生日3.24\n你是白羊座\n"
],
[
"def switch_word(noun):\n if noun.endwith('x','ch'):\n return('加 es')\n if noun.endwith('y'):\n return('变y为i,加es')\n else:\n return('加 s')\n \n \n \nword=input('请输入一个单数名词: ')\nprint('该单词的复数形式为: ',switch_word(word))\n",
"_____no_output_____"
],
[
"def compute_sum(x,y,z):\n total_sum=0\n while x<=y:\n x+=z\n total_sum+=x\n return total_sum\n \nm=int(input('请输入一个整数m: '))\nn=int(input('请输入一个大于m的整数n: '))\nk=int(input('请输入间隔k: '))\n\nprint('最终的和是:', compute_sum(m,n,k))\n ",
"请输入一个整数m: 8\n请输入一个大于m的整数n: 15\n请输入间隔k: 2\n最终的和是: 52\n"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code"
]
] |
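The pluralisation exercise in the homework record above only branches on the word's suffix; a standalone English-language sketch of the same rule, with quick checks. The extra 's'/'sh' endings are an assumption beyond the original exercise.

def plural(noun: str) -> str:
    # -x/-ch/-s/-sh take "es", a trailing "y" becomes "ies", everything else takes "s".
    if noun.endswith(('x', 'ch', 's', 'sh')):
        return noun + 'es'
    if noun.endswith('y'):
        return noun[:-1] + 'ies'
    return noun + 's'

assert plural('box') == 'boxes'
assert plural('city') == 'cities'
assert plural('dog') == 'dogs'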
4a62d7c1ebd6ecfb2a68cc64ecbfb13c40b8c358
| 81,090 |
ipynb
|
Jupyter Notebook
|
notebooks/construct_hostlib.ipynb
|
LSSTDESC/transient-host-sims
|
a9c02077d2378bd2e3391c52ef2eeecf916cf313
|
[
"BSD-3-Clause"
] | null | null | null |
notebooks/construct_hostlib.ipynb
|
LSSTDESC/transient-host-sims
|
a9c02077d2378bd2e3391c52ef2eeecf916cf313
|
[
"BSD-3-Clause"
] | null | null | null |
notebooks/construct_hostlib.ipynb
|
LSSTDESC/transient-host-sims
|
a9c02077d2378bd2e3391c52ef2eeecf916cf313
|
[
"BSD-3-Clause"
] | null | null | null | 224.626039 | 68,712 | 0.895486 |
[
[
[
"from sklearn.neighbors import NearestNeighbors\nimport numpy as np\nfrom matplotlib import pyplot as plt\n#import corner\nimport urllib\nimport os\nimport sys\n#import GCRCatalogsF\nfrom astropy.io import fits\n#from demo_funcs_local import *\nfrom sklearn.model_selection import train_test_split\nimport pandas as pd\nfrom astropy.cosmology import Planck15 as P15\nfrom astropy import units as u\nimport seaborn as sns\nimport pandas as pd",
"_____no_output_____"
],
[
"sns.set_context(\"talk\")\n\nsns.set(font_scale=2.5)\nsns.set_style(\"white\")\nsns.set_style(\"ticks\", {\"xtick.major.size\": 20, \"ytick.major.size\": 40})\nsns.set_style(\"ticks\", {\"xtick.minor.size\": 8, \"ytick.minor.size\": 8})\n\nplt.rcParams.update({\n \"text.usetex\": True,\n \"font.family\": \"sans-serif\",\n \"font.sans-serif\": [\"Helvetica\"]})\n## for Palatino and other serif fonts use:\nplt.rcParams.update({\n \"text.usetex\": True,\n \"font.family\": \"serif\",\n \"font.serif\": [\"Palatino\"],\n})\n\nsns.set_style('white', {'axes.linewidth': 1.0})\nplt.rcParams['xtick.major.size'] = 15\nplt.rcParams['ytick.major.size'] = 15\n\nplt.rcParams['xtick.minor.size'] = 10\nplt.rcParams['ytick.minor.size'] = 10\nplt.rcParams['xtick.minor.width'] = 2\nplt.rcParams['ytick.minor.width'] = 2\n\nplt.rcParams['xtick.major.width'] = 2\nplt.rcParams['ytick.major.width'] = 2\nplt.rcParams['xtick.bottom'] = True\nplt.rcParams['xtick.top'] = True\nplt.rcParams['ytick.left'] = True\nplt.rcParams['ytick.right'] = True\n\nplt.rcParams['xtick.minor.visible'] = True\nplt.rcParams['ytick.minor.visible'] = True\nplt.rcParams['xtick.direction'] = 'in'\nplt.rcParams['ytick.direction'] = 'in'",
"_____no_output_____"
],
[
"fnIa = \"./matchedGals_IaGhostlib.tar.gz\"\nfnII = \"./matchedGals_IIGhostlib.tar.gz\"\n\ndc2_Ia = pd.read_csv(fnIa)\ndc2_II = pd.read_csv(fnII)",
"_____no_output_____"
],
[
"dc2_Ia['GHOST_transientclass'] = 'SN Ia'\ndc2_II['GHOST_transientclass'] = 'SN II'\n\ndc2_full = pd.concat([dc2_Ia, dc2_II],ignore_index=True)",
"_____no_output_____"
],
[
"dc2_full['size_minor_true_x'].dropna()",
"_____no_output_____"
],
[
"#dc2_full['size_minor_true'] = dc2_full['size_minor_true_x']\n#del dc2_full['size_minor_true_x']\n#del dc2_full['size_minor_true_y']\n\ndc2_full['position_angle_true'] = dc2_full['position_angle_true_x']\ndel dc2_full['position_angle_true_x']\ndel dc2_full['position_angle_true_y']",
"_____no_output_____"
],
[
"dc2_full.columns.values",
"_____no_output_____"
],
[
"SN_list = []\nfor SN in np.unique(dc2_full['GHOST_transientclass'].values):\n SN_list.append(dc2_full[dc2_full['GHOST_transientclass'] == SN])",
"_____no_output_____"
],
[
"#g_obs, r_obs\n#Size_true, size_minor_true → a0_Sersic, b0_Sersic\n#Sersic index → n0_Sersic\n#Position_angle_true → a_rot Martine: figure out if this is the same\n \nSNCount = {}\n\nfor dc2 in SN_list:\n SN = dc2['GHOST_transientclass'].values[0]\n SN = SN.replace(\"/\", \"\")\n DF = pd.DataFrame({'VARNAMES:':['GAL:']*len(dc2['stellar_mass'].values), \n 'GALID':dc2['galaxy_id'].values, \n 'galaxy_id':dc2['galaxy_id'].values,\n 'RA_GAL':dc2['ra'].values,\n 'DEC_GAL':dc2['dec'].values,\n 'ZTRUE':dc2['PZflowredshift'].values, \n 'PZflowredshift':dc2['PZflowredshift'].values,\n 'DC2redshift':dc2['DC2redshift'].values, \n 'g_obs':dc2['Mag_true_g_lsst'].values, \n 'r_obs':dc2['Mag_true_r_lsst'].values, \n 'i_obs':dc2['Mag_true_i_lsst'].values, \n 'z_obs':dc2['Mag_true_z_lsst'].values, \n 'a0_Sersic':dc2['size_true'].values, \n 'size_true':dc2['size_true'].values, \n 'b0_Sersic':dc2['size_minor_true'].values, \n 'size_minor_true':dc2['size_minor_true'].values, \n 'n0_Sersic':dc2['totalSersicIndex'].values,\n 'totalSersicIndex':dc2['totalSersicIndex'].values, \n 'a_rot':dc2['position_angle_true'].values,\n 'position_angle_true':dc2['position_angle_true'].values,\n 'TOTAL_ELLIPTICITY':dc2['totalEllipticity'].values,\n 'LOGMASS_TRUE':np.log10(dc2['stellar_mass'].values), \n 'LOGMASS':np.log10(dc2['stellar_mass'].values), \n 'LOGMASS_OBS':np.log10(dc2['stellar_mass'].values), \n 'stellar_mass':dc2['stellar_mass'].values, \n 'STAR_FORMATION_RATE':dc2['PZflowSFRtot'].values, \n 'PZflowSFRtot':dc2['PZflowSFRtot'].values, \n 'DC2SFRtot':dc2['DC2SFRtot'].values, \n 'Mag_true_g_sdss_z0':dc2['Mag_true_g_sdss_z0'].values,\n 'Mag_true_r_sdss_z0':dc2['Mag_true_r_sdss_z0'].values,\n 'Mag_true_i_sdss_z0':dc2['Mag_true_i_sdss_z0'].values,\n 'Mag_true_z_sdss_z0':dc2['Mag_true_z_sdss_z0'].values,\n 'GHOST_objID':dc2['GHOST_objID'].values,\n 'GHOST_ra':dc2['GHOST_ra'].values,\n 'GHOST_dec':dc2['GHOST_dec'].values,\n 'GHOST_transientclass':dc2['GHOST_transientclass'].values,\n 'sersic_disk':dc2['sersic_disk'].values,\n 'sersic_bulge':dc2['sersic_bulge'].values,\n 'nn_distance':dc2['nn_distance'].values,\n 'size_bulge_true':dc2['size_bulge_true'].values,\n 'size_minor_bulge_true':dc2['size_minor_bulge_true'].values,\n 'size_disk_true':dc2['size_disk_true'].values,\n 'size_minor_disk_true':dc2['size_minor_disk_true'].values,\n 'mag_true_g_lsst':dc2['mag_true_g_lsst'].values,\n 'mag_true_r_lsst':dc2['mag_true_r_lsst'].values,\n 'mag_true_i_lsst':dc2['mag_true_i_lsst'].values,\n 'mag_true_z_lsst':dc2['mag_true_z_lsst'].values,\n 'Mag_true_g_lsst':dc2['Mag_true_g_lsst'].values,\n 'Mag_true_r_lsst':dc2['Mag_true_r_lsst'].values,\n 'Mag_true_i_lsst':dc2['Mag_true_i_lsst'].values,\n 'Mag_true_z_lsst':dc2['Mag_true_z_lsst'].values})\n SNCount[SN] = len(DF)\n #combine with original to get same names out\n DF_merged = pd.merge(DF, dc2_full,on='galaxy_id')\n DF_merged.drop_duplicates(subset=['GALID'], inplace=True)\n DF_merged.to_csv(\"%s_G.HOSTLIB\"%SN.replace(\" \", \"\"),index=False, sep=' ')",
"_____no_output_____"
],
[
"DF_merged['g_obs']",
"_____no_output_____"
],
[
"sns.set_context(\"talk\")\nplt.figure(figsize=(15,10))\n\ni = 0\ncols = sns.color_palette()\nfor SN in np.unique(dc2_full['GHOST_transientclass']):\n dc2_temp = dc2_full[dc2_full['GHOST_transientclass'] == SN]\n sns.kdeplot(data=dc2_temp, x=\"PZflowredshift\",lw=3, label=SN, color=cols[i])\n i += 1\n#plt.yscale(\"log\")\nplt.legend()\n#plt.savefig(\"7Class_HostlibHist_FracHist_NewMatching_10pct.png\",bbox_inches='tight', dpi=300)",
"_____no_output_____"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a62da3eae6db64e2cca1825e8dce3880eaac280
| 10,608 |
ipynb
|
Jupyter Notebook
|
Phrase_Extraction.ipynb
|
andygrass/ML-for-Good-Hackathon
|
c2a887f7c4616858e80ba5dfd2ce05bbf333510b
|
[
"Apache-2.0"
] | null | null | null |
Phrase_Extraction.ipynb
|
andygrass/ML-for-Good-Hackathon
|
c2a887f7c4616858e80ba5dfd2ce05bbf333510b
|
[
"Apache-2.0"
] | null | null | null |
Phrase_Extraction.ipynb
|
andygrass/ML-for-Good-Hackathon
|
c2a887f7c4616858e80ba5dfd2ce05bbf333510b
|
[
"Apache-2.0"
] | 1 |
2021-11-28T09:33:34.000Z
|
2021-11-28T09:33:34.000Z
| 32.539877 | 1,915 | 0.480958 |
[
[
[
"import pandas as pd\n\nroot = \"C:\\\\Users\\\\user\\\\SadafPythonCode\\\\MLHackathon\\\\ML-for-Good-Hackathon\\\\Data\\\\\"\ndf = pd.read_csv(root + 'CrisisLogger\\\\crisislogger.csv')\na = df.iloc[28,1]\na",
"_____no_output_____"
],
[
"import spacy\nnlp = spacy.load('en_core_web_sm',disable=['ner','textcat'])\n\n# function for rule 2\ndef rule2(text): \n doc = nlp(text)\n pat = [] \n # iterate over tokens\n for token in doc:\n phrase = ''\n # if the word is a subject noun or an object noun\n if (token.pos_ == 'NOUN')\\\n and (token.dep_ in ['dobj','pobj','nsubj','nsubjpass']):\n # iterate over the children nodes\n for subtoken in token.children:\n # if word is an adjective or has a compound dependency\n if (subtoken.pos_ == 'ADJ') or (subtoken.dep_ == 'compound'):\n phrase += subtoken.text + ' ' \n if len(phrase)!=0:\n phrase += token.text \n if len(phrase)!=0:\n pat.append(phrase)\n return pat\n",
"_____no_output_____"
]
],
[
[
"### Noun Phrases",
"_____no_output_____"
]
],
[
[
"rule2(a) # noun phrases",
"_____no_output_____"
]
],
[
[
"### I/We phrases",
"_____no_output_____"
]
],
[
[
"def rule2_mod(text,index):\n \n doc = nlp(text)\n\n phrase = ''\n \n for token in doc:\n \n if token.i == index:\n \n for subtoken in token.children:\n if (subtoken.pos_ == 'ADJ'):\n phrase += ' '+subtoken.text\n break\n \n return phrase\n\n# rule 1 modified function\ndef rule1_mod(text):\n \n doc = nlp(text)\n \n sent = []\n \n for token in doc:\n # root word\n if (token.pos_=='VERB'):\n \n phrase =''\n \n # only extract noun or pronoun subjects\n for sub_tok in token.lefts:\n \n if (sub_tok.dep_ in ['nsubj','nsubjpass']) and (sub_tok.pos_ in ['NOUN','PROPN','PRON']):\n \n # look for subject modifier\n adj = rule2_mod(text,sub_tok.i)\n \n phrase += adj + ' ' + sub_tok.text\n\n # save the root word of the word\n phrase += ' '+token.lemma_ \n\n # check for noun or pronoun direct objects\n for sub_tok in token.rights:\n \n if (sub_tok.dep_ in ['dobj']) and (sub_tok.pos_ in ['NOUN','PROPN']):\n \n # look for object modifier\n adj = rule2_mod(text,sub_tok.i)\n \n phrase += adj+' '+sub_tok.text\n sent.append(phrase)\n \n return sent\n\nrule1_mod(a) # I/we phrases",
"_____no_output_____"
]
],
[
[
"### Phrases with noun followed by prepositions",
"_____no_output_____"
]
],
[
[
"# rule 3 function\ndef rule3(text):\n \n doc = nlp(text)\n \n sent = []\n \n for token in doc:\n\n # look for prepositions\n if token.pos_=='ADP':\n\n phrase = ''\n \n # if its head word is a noun\n if token.head.pos_=='NOUN':\n \n # append noun and preposition to phrase\n phrase += token.head.text\n phrase += ' '+token.text\n\n # check the nodes to the right of the preposition\n for right_tok in token.rights:\n # append if it is a noun or proper noun\n if (right_tok.pos_ in ['NOUN','PROPN']):\n phrase += ' '+right_tok.text\n \n if len(phrase)>2:\n sent.append(phrase)\n \n return sent\nrule3(a) # prepositions",
"_____no_output_____"
]
]
] |
[
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
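A usage sketch for the three extraction rules in the CrisisLogger record above, applied over the whole transcript column rather than a single row. It assumes the df, rule2, rule1_mod and rule3 objects defined in that record, and that the second column holds the free text (as in df.iloc[28, 1]); both assumptions are noted in the comments.

# Assumes df, rule2, rule1_mod and rule3 from the record above are already defined.
text_col = df.columns[1]   # assumption: the second column is the transcript text
phrases = df[text_col].astype(str).apply(
    lambda t: {
        'noun_phrases': rule2(t),            # adjective/compound + noun
        'subject_verb_object': rule1_mod(t), # subject + verb (+ object) phrases
        'noun_prepositions': rule3(t),       # noun followed by preposition
    }
)
print(phrases.head())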
4a62e102224ce3dd7b735dda0e30a8bf4abb2105
| 9,350 |
ipynb
|
Jupyter Notebook
|
konzepte/06_Datentypen_Elementar.ipynb
|
wwi21ama-prog/go-intro
|
a1ceba04b8348162c123b7564e7e5db748c4112d
|
[
"MIT"
] | 1 |
2022-02-19T17:33:00.000Z
|
2022-02-19T17:33:00.000Z
|
konzepte/06_Datentypen_Elementar.ipynb
|
wwi21ama-prog/go-intro
|
a1ceba04b8348162c123b7564e7e5db748c4112d
|
[
"MIT"
] | null | null | null |
konzepte/06_Datentypen_Elementar.ipynb
|
wwi21ama-prog/go-intro
|
a1ceba04b8348162c123b7564e7e5db748c4112d
|
[
"MIT"
] | null | null | null | 22.475962 | 275 | 0.532086 |
[
[
[
"# Elementare Datentypen\n\n*Erinnerung:* Beim Deklarieren einer Variable muss man deren Datentyp angeben oder er muss eindeutig erkennbar sein.\nDie beiden folgenden Anweisungen erzeugen beide eine Variable vom Typ `int`:\n\n var a int\n b := 42\n\nBisher haben wir nur einen Datentyp benutzt: `int`. Dieser Typ steht für ganze Zahlen, also positive oder negative Zahlen ohne Nachkommastellen. Go stellt eine ganze Reihe von Datentypen bereit, für verschiedene Arten von Daten oder auch für verschiedene Datenbereiche.",
"_____no_output_____"
],
[
"## Ganzzahlige Datentypen",
"_____no_output_____"
]
],
[
[
"var i1 int // Zahl mit Vorzeichen\nvar i2 int32 // 32-Bit-Zahl mit Vorzeichen\nvar i3 int64 // 64-Bit-Zahl mit Vorzeichen\nvar i4 uint // Zahl ohne Vorzeichen\nvar i5 uint32 // 32-Bit-Zahl ohne Vorzeichen\nvar i6 uint64 // 64-Bit-Zahl ohne Vorzeichen\nvar i7 byte // Sonderfall: 8 Bit ohne Vorzeichen, meist als Buchstaben verwendet \n\ni7 := 42 // Automatische Typinferenz, hieraus wird `int`",
"_____no_output_____"
]
],
[
[
"### Maximale Wertebereiche von `int`-Datentypen\nDie meisten Datentypen haben einen festen, begrenzten Wertebereich, d.h. es gibt eine größte und eine kleinste Zahl, die man damit speichern kann.\nDiese Einschränkung liegt daran, dass eine feste Anzahl an Ziffern verwendet wird. Reichen diese Ziffern nicht mehr aus, gehen Informationen verloren. ",
"_____no_output_____"
]
],
[
[
"^uint(0) // Größter Wert für ein uint",
"_____no_output_____"
],
[
"int(^uint(0) >> 1) // Größter Wert für ein int",
"_____no_output_____"
],
[
"-int(^uint(0) >> 1)-1 // Größter Wert für ein int",
"_____no_output_____"
],
[
"^uint32(0) >> 1 // Größter Wert für ein uint32",
"_____no_output_____"
],
[
"^uint64(0) >> 1 // Größter Wert für ein int64",
"_____no_output_____"
]
],
[
[
"### Überläufe\nÜberschreitet man den maximalen Wert eines Datentypen, so geschieht ein *Überlauf*:",
"_____no_output_____"
]
],
[
[
"^uint(0)+1",
"_____no_output_____"
],
[
"int32(^uint32(0) >> 1)+1",
"_____no_output_____"
]
],
[
[
"## Fließkomma-Datentypen\nNeben den ganzzahligen Datentypen gibt es auch zwei *Gleitkomma*-Datentypen, die zur Darstellung von Kommazahlen mit einer variablen Anzahl an Nachkommastellen gedacht sind:",
"_____no_output_____"
]
],
[
[
"var f1 float32 = 42\nvar f2 float64 = 23.5",
"_____no_output_____"
]
],
[
[
"Gleitkommazahlen werden z.B. gebraucht, um Divisionen, Wurzelberechnungen etc. durchzuführen.\nSie werden intern in der Form $m \\cdot b^e$ dargestellt, d.h. z.B. ist $234,567 = 2,23456 * 10^2$.\n\nEin Problem dabei ist, dass für die Darstellung von *Mantisse* ($m$) und *Exponent* ($e$) nur eine begrenzte Anzahl an Bits zur Verfügung steht.\nDadurch ist die Genauigkeit bei der Darstellung von und Rechnung mit Gleitkommazahlen begrenzt. Die folgenden Beispiele demonstrieren das:",
"_____no_output_____"
]
],
[
[
"a, b := 5.67, 8.97\na - b",
"_____no_output_____"
],
[
"var x float64 = 1.01\nvar i float64 = 0.01\n\nfor x < 1.4 {\n println(x)\n x += i\n}",
"1.01\n1.02\n1.03\n1.04\n1.05\n1.06\n1.07\n1.08\n1.09\n1.1\n1.11\n1.12\n1.1300000000000001\n1.1400000000000001\n1.1500000000000001\n1.1600000000000001\n1.1700000000000002\n1.1800000000000002\n1.1900000000000002\n1.2000000000000002\n1.2100000000000002\n1.2200000000000002\n1.2300000000000002\n1.2400000000000002\n1.2500000000000002\n1.2600000000000002\n1.2700000000000002\n1.2800000000000002\n1.2900000000000003\n1.3000000000000003\n1.3100000000000003\n1.3200000000000003\n1.3300000000000003\n1.3400000000000003\n1.3500000000000003\n1.3600000000000003\n1.3700000000000003\n1.3800000000000003\n1.3900000000000003\n"
]
],
[
[
"## Wahrheitswerte\nEin weiterer wichtiger Datentyp sind die Wahrheitswerte `true` und `false`.\nWie der Name schon andeutet, dienen sie zur Darstellung von Auswertungen, ob etwas *wahr* oder *falsch* ist.\nBspw. ist der Vergleich `42 == 6 * 7` wahr, die Aussage `42 > 15` jedoch falsch.",
"_____no_output_____"
]
],
[
[
"var b1 bool",
"_____no_output_____"
],
[
"b1",
"_____no_output_____"
]
],
[
[
"Mit Wahrheitswerten wird z.B. bei bedingten Sprüngen und Schleifen gerechnet, um die Bedingungen auszuwerten.\nOft schreibt man Funktionen, die komplexere Zusammenhänge prüfen sollen und die einen Wert vom Typ `bool` liefern.\nAls kleines Beispiel prüft die folgende Funktion, ob ihr Parameter eine ungerade Zahl ist:",
"_____no_output_____"
]
],
[
[
"func is_odd(n int) bool {\n return n % 2 != 0\n}",
"_____no_output_____"
],
[
"is_odd(3)",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
]
] |
4a62e73bc6d306c697d1e9f38ec95226e6b57735
| 408,019 |
ipynb
|
Jupyter Notebook
|
Charts - Full Dataset.ipynb
|
saashimi/CPO-datascience
|
065f5b39569d4394bfa0bdbccb6619d3a8234419
|
[
"MIT"
] | null | null | null |
Charts - Full Dataset.ipynb
|
saashimi/CPO-datascience
|
065f5b39569d4394bfa0bdbccb6619d3a8234419
|
[
"MIT"
] | null | null | null |
Charts - Full Dataset.ipynb
|
saashimi/CPO-datascience
|
065f5b39569d4394bfa0bdbccb6619d3a8234419
|
[
"MIT"
] | null | null | null | 1,522.458955 | 215,632 | 0.94552 |
[
[
[
"# CPO Datascience\n\nThis program is intended for use by the Portland State University Campus Planning Office (CPO). ",
"_____no_output_____"
]
],
[
[
"#Import required packages\nimport pandas as pd\nimport statsmodels.api as sm\nimport statsmodels.formula.api as smf\nimport numpy as np\nimport datetime\nimport seaborn as sns\nimport matplotlib.pyplot as plt\n\n",
"_____no_output_____"
],
[
"def format_date(df_date):\n \"\"\"\n Splits Meeting Times and Dates into datetime objects where applicable using regex.\n \"\"\"\n df_date['Days'] = df_date['Meeting_Times'].str.extract('([^\\s]+)', expand=True)\n df_date['Start_Date'] = df_date['Meeting_Dates'].str.extract('([^\\s]+)', expand=True)\n df_date['Year'] = df_date['Term'].astype(str).str.slice(0,4)\n df_date['Quarter'] = df_date['Term'].astype(str).str.slice(5,6)\n df_date['Building'] = df_date['ROOM'].str.extract('([^\\s]+)', expand=True)\n #df_date['Start_Month'] = pd.to_datetime(df_date['Year'] + df_date['Start_Date'], format='%Y%b')\n df_date['End_Date'] = df_date['Meeting_Dates'].str.extract('(?<=-)(.*)(?= )', expand=True)\n #df_date['End_Month'] = pd.to_datetime(df_date['End_Date'], format='%b')\n df_date['Start_Time'] = df_date['Meeting_Times'].str.extract('(?<= )(.*)(?=-)', expand=True)\n df_date['Start_Time'] = pd.to_datetime(df_date['Start_Time'], format='%H%M')\n df_date['End_Time'] = df_date['Meeting_Times'].str.extract('((?<=-).*$)', expand=True)\n df_date['End_Time'] = pd.to_datetime(df_date['End_Time'], format='%H%M')\n df_date['Duration_Hr'] = ((df_date['End_Time'] - df_date['Start_Time']).dt.seconds)/3600\n #df_date = df_date.set_index(pd.DatetimeIndex(df_date['Start_Month']))\n return df_date\n\ndef format_xlist(df_xl):\n \"\"\"\n revises % capacity calculations by using Max Enrollment instead of room capacity. \n \"\"\"\n df_xl['Cap_Diff'] = np.where(df_xl['Xlst'] != '', \n df_xl['Max_Enrl'].astype(int) - df_xl['Actual_Enrl'].astype(int), \n df_xl['Room_Capacity'].astype(int) - df_xl['Actual_Enrl'].astype(int)) \n df_xl = df_xl.loc[df_xl['Room_Capacity'].astype(int) < 999]\n\n return df_xl\n\n\ndef grouped_plot_graph(df_in):\n fig, ax = plt.subplots()\n grouped = df_in.groupby(['Year', 'Quarter'])\n for key, group in grouped:\n group.plot(ax=ax, kind='scatter', x='Start_Month', y='Cap_Diff', label=key)\n plt.show()\n \ndef plot_graph(x_vals, y_vals):\n \"\"\"\n Plots the dataframe information.\n \"\"\"\n x = range(len(x_vals))\n plt.figure(figsize=(20,10)) \n N = 50\n colors = np.random.rand(N)\n sns.stripplot(x_vals, y_vals)\n plt.xticks(rotation=90)\n plt.scatter(x, y_vals, alpha=0.5, )\n plt.show()\n\ndef OLS_operations(y, X):\n #mod = smf.OLS('Cap_Diff ~ C(Dept)', data=df_data)\n mod = sm.OLS(np.asarray(y), X)\n res = mod.fit()\n print(res.summary())\n fig = plt.figure(figsize=(20,10))\n fig = sm.graphics.plot_partregress_grid(mod, fig=fig)\n plt.show()\n\n#pd.set_option('display.max_rows', None)\npd.set_option('display.max_columns', None)\ndf = pd.read_csv('data/PSU_master_classroom_91-17.csv')\ndf = df.fillna('')\n\ndf = format_date(df)\n# Avoid classes that only occur on a single day\ndf = df.loc[df['Start_Date'] != df['End_Date']]\ndf = df.loc[df['Online Instruct Method'] != 'Fully Online']\n#df = df.loc[df['Term'] == 201604]\n\n# Calculate number of days per week and treat Sunday condition\ndf['Days_Per_Week'] = df['Days'].str.len()\ndf['Room_Capacity'] = df['Room_Capacity'].apply(lambda x: x if (x != 'No Data Available') else 0)\ndf_cl = df_cl.loc[df_cl['Term'] < 201701]\ndf_cl = format_xlist(df)",
"C:\\Users\\kms22\\AppData\\Local\\Continuum\\Miniconda3\\envs\\data-science\\lib\\site-packages\\IPython\\core\\interactiveshell.py:2717: DtypeWarning: Columns (2,3) have mixed types. Specify dtype option on import or set low_memory=False.\n interactivity=interactivity, compiler=compiler, result=result)\n"
],
[
"dep_var = df_cl['Cap_Diff'] # Our dependent variable\ncategory = 'Dept'\ntest_var = df_cl['{0}'.format(category)]\n\nplot_graph(test_var, dep_var)",
"_____no_output_____"
],
[
"def main():\n pd.set_option('display.max_columns', None)\n df = pd.read_csv('data/PSU_master_classroom_91-17.csv')\n df = df.fillna('')\n df = format_date(df)\n # Avoid classes that only occur on a single day\n df = df.loc[df['Start_Date'] != df['End_Date']]\n df = df.loc[df['Online Instruct Method'] != 'Fully Online']\n df['Days_Per_Week'] = df['Days'].str.len()\n df['Room_Capacity'] = df['Room_Capacity'].apply(lambda x: x if (x != 'No Data Available') else 0)\n df_cl = format_xlist(df)\n dep_var = df_cl['Cap_Diff'] # Our dependent variable\n category = 'Building'\n test_var = df_cl['{0}'.format(category)]\n plot_graph(test_var, dep_var)\n \nmain()",
"C:\\Users\\kms22\\AppData\\Local\\Continuum\\Miniconda3\\envs\\data-science\\lib\\site-packages\\IPython\\core\\interactiveshell.py:2827: DtypeWarning: Columns (2,3) have mixed types. Specify dtype option on import or set low_memory=False.\n if self.run_code(code, result):\n"
],
[
"def main():\n pd.set_option('display.max_columns', None)\n df = pd.read_csv('data/PSU_master_classroom_91-17.csv')\n df = df.fillna('')\n df = format_date(df)\n # Avoid classes that only occur on a single day\n df = df.loc[df['Start_Date'] != df['End_Date']]\n df = df.loc[df['Online Instruct Method'] != 'Fully Online']\n df['Days_Per_Week'] = df['Days'].str.len()\n df['Room_Capacity'] = df['Room_Capacity'].apply(lambda x: x if (x != 'No Data Available') else 0)\n df_cl = format_xlist(df)\n dep_var = df_cl['Cap_Diff'] # Our dependent variable\n category = 'Class'\n test_var = df_cl['{0}'.format(category)]\n plot_graph(test_var, dep_var)\n \nmain()",
"C:\\Users\\kms22\\AppData\\Local\\Continuum\\Miniconda3\\envs\\data-science\\lib\\site-packages\\IPython\\core\\interactiveshell.py:2827: DtypeWarning: Columns (2,3) have mixed types. Specify dtype option on import or set low_memory=False.\n if self.run_code(code, result):\n"
]
]
] |
[
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
]
] |
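A toy demonstration of the Meeting_Times parsing used by format_date in the classroom record above, on made-up rows rather than the PSU file, to show what the lookaround patterns extract:

import pandas as pd

toy = pd.DataFrame({'Meeting_Times': ['MWF 0900-1050', 'TR 1400-1550']})
toy['Days'] = toy['Meeting_Times'].str.extract(r'([^\s]+)', expand=True)        # leading day codes
start = toy['Meeting_Times'].str.extract(r'(?<= )(.*)(?=-)', expand=True)[0]    # text between space and '-'
end = toy['Meeting_Times'].str.extract(r'((?<=-).*$)', expand=True)[0]          # text after '-'
toy['Start_Time'] = pd.to_datetime(start, format='%H%M')
toy['End_Time'] = pd.to_datetime(end, format='%H%M')
toy['Duration_Hr'] = (toy['End_Time'] - toy['Start_Time']).dt.seconds / 3600
print(toy[['Days', 'Duration_Hr']])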
4a62eeca77d97d0cf49b12f4c736904fd217fcd4
| 19,715 |
ipynb
|
Jupyter Notebook
|
04-python-prat/data_science/Python and Data Science _sem_spoilers.ipynb
|
opensanca/trilha-python
|
9ffadd266e22a920a3c1acfdbc6a5a4645fce9d6
|
[
"MIT"
] | 47 |
2016-05-19T22:37:18.000Z
|
2022-02-22T02:34:18.000Z
|
04-python-prat/data_science/Python and Data Science _sem_spoilers.ipynb
|
opensanca/trilha-python
|
9ffadd266e22a920a3c1acfdbc6a5a4645fce9d6
|
[
"MIT"
] | 21 |
2016-05-20T12:35:25.000Z
|
2016-07-26T00:23:33.000Z
|
04-python-prat/data_science/Python and Data Science _sem_spoilers.ipynb
|
lamenezes/python-intro
|
9ffadd266e22a920a3c1acfdbc6a5a4645fce9d6
|
[
"MIT"
] | 25 |
2016-05-19T22:52:32.000Z
|
2022-01-08T15:15:36.000Z
| 18.669508 | 128 | 0.503525 |
[
[
[
"# Trabalhando com o Jupyter\nFerramenta que permite criação de código, visualização de resultados e documentação no mesmo documento (.ipynb)\n\n**Modo de comando:** `esc` para ativar, o cursor fica inativo\n\n**Modo de edição:** `enter` para ativar, modo de inserção\n\n### Atalhos do teclado (MUITO úteis)\nPara usar os atalhos descritos abaixo a célula deve estar selecionada porém não pode estar no modo de edição. \n\n* Para entrar do modo de comando: `esc`\n\n* Criar nova célula abaixo: `b` (elow)\n* Criar nova célula acima: `a` (bove)\n\n* Recortar uma célula: `x`\n* Copiar uma célula: `c`\n* Colar uma cálula: `v`\n\n* Executar uma célula e permanecer nela mesma: `ctrl + enter`\n* Executar uma célula e mover para a próxima: `shift + enter`\n\n* ** Para ver todos os atalhos, tecle `h`**\n\n### Tipos de célula\n**Code:** Para código Python\n\n**Markdown:** Para documentação\n\nTambém existem **Raw NBConverter** e **Heading**",
"_____no_output_____"
],
[
"# Pandas (http://pandas.pydata.org/)\n* Biblioteca Python para análise de dados\n* Provê ferramentas de alta performance e fácil usabilidade para análise de dados\n\n### Como instalar\n* Anaconda (http://pandas.pydata.org/pandas-docs/stable/install.html#installing-pandas-with-anaconda)\n * Download anaconda: https://www.continuum.io/downloads\n * Instalar Anaconda: https://docs.continuum.io/anaconda/install\n * Disponível para `osx-64`, `linux-64`, `linux-32`, `win-64`, `win-32` e `Python 2.7`, `Python 3.4`, e `Python 3.5`\n * `conda install pandas`\n* Pip\n * `pip install pandas`\n",
"_____no_output_____"
],
[
"# Matplotlib (http://matplotlib.org/)\n* Biblioteca Python para plotar gráficos 2D\n\n### Como instalar\n* Anaconda (http://pandas.pydata.org/pandas-docs/stable/install.html#installing-pandas-with-anaconda)\n * Download anaconda: https://www.continuum.io/downloads\n * Instalar Anaconda: https://docs.continuum.io/anaconda/install\n * Disponível para `osx-64`, `linux-64`, `linux-32`, `win-64`, `win-32` e `Python 2.7`, `Python 3.4`, e `Python 3.5`\n * `conda install matplotlib`\n* Pip\n * `pip install matplotlib`",
"_____no_output_____"
]
],
[
[
"import pandas as pd\nimport matplotlib\n%matplotlib inline",
"_____no_output_____"
]
],
[
[
"### Carregando um arquivo csv em um DataFrame do Pandas\n* `pd.DataFrame.from_csv(file_name)`\n \n Se, ao usar este comando, você se deparar com um UnicodeDecodingError, adicione o parâmetro `encoding='utf-8'`",
"_____no_output_____"
],
[
"## cast.csv",
"_____no_output_____"
],
[
"## release_dates.csv",
"_____no_output_____"
],
[
"## titles",
"_____no_output_____"
],
[
"**`df.head(n):`** \n* Visualizar as primeiras *n* linhas. \n* Default: *n = 5*.",
"_____no_output_____"
],
[
"**`df.tail(n):`** \n* Visualizar as últimas *n* linhas. \n* Default: *n = 5*.",
"_____no_output_____"
],
[
"### Quantos registros há no conjunto?\n\n**`len(df)`:**\n* Tamanho do df",
"_____no_output_____"
],
[
"### Quais são os possíveis valores para a coluna `type`?\n\n**`df[col]`:**\n* Visualizar uma coluna do df\n\nou\n\n**`df.col`:**\n* Se o nome da coluna não tiver, espaços, caracteres especiais ou for uma variável\n\n**Obs:** Ao selecionar uma coluna e manipulá-la fora de um DataFrame, a mesma é tratada como uma **Série**.",
"_____no_output_____"
],
[
"**`df[col].unique()`:** \n* Mostrar os possíveis valores de uma coluna",
"_____no_output_____"
],
[
"### Quantos atores e quantas atrizes há no conjunto?\n\n**df[col].value_counts():**\n* Contagem de quantos registros há para cada valor possível da coluna col (somente se col for categórica)",
"_____no_output_____"
],
[
"### Operações com colunas",
"_____no_output_____"
],
[
"#### Operações Aritméticas",
"_____no_output_____"
],
[
"#### Comparações",
"_____no_output_____"
],
[
"#### Filtrar",
"_____no_output_____"
],
[
"* Por valor específico de uma coluna",
"_____no_output_____"
],
[
"* Por colunas",
"_____no_output_____"
],
[
"* Por valor nulo ou não nulo",
"_____no_output_____"
],
[
"* Por vetor de booleanos",
"_____no_output_____"
],
[
"* Preencher valores nulos",
"_____no_output_____"
],
[
"Por DataFrame",
"_____no_output_____"
],
[
"Por coluna",
"_____no_output_____"
],
[
"### Quantos atores atuaram em cada ano?",
"_____no_output_____"
],
[
"### Qual foi a diferença entre o número de atores e atrizes que atuaram em cada década?",
"_____no_output_____"
],
[
"### Datas",
"_____no_output_____"
],
[
"### Quanto % dos filmes foram lançados na sexta-feira?",
"_____no_output_____"
],
[
"### Merge",
"_____no_output_____"
],
[
"### Qual o nome e ano do filme mais antigo?",
"_____no_output_____"
],
[
"### Quantos filmes são de 1960?",
"_____no_output_____"
],
[
"### Quantos filmes são de cada ano dos anos 70?",
"_____no_output_____"
],
[
"### Quantos filmes foram lançados desde o ano que você nasceu até hoje?",
"_____no_output_____"
],
[
"### Quais são os nomes dos filmes de 1906?",
"_____no_output_____"
],
[
"### Quais são os 15 nomes de filmes mais comuns?",
"_____no_output_____"
],
[
"### Em quantos filmes Judi Dench atuou?",
"_____no_output_____"
],
[
"### Liste os filmes nos quais Judi Dench atuou como o ator número 1, ordenado por ano.",
"_____no_output_____"
],
[
"### Liste os atores da versão de 1972 de Sleuth pela ordem do rank n.",
"_____no_output_____"
],
[
"### Quais atores mais atuaram em 1985?",
"_____no_output_____"
],
[
"# SciKit Learn (http://scikit-learn.org)\n\n* Biblioteca Python para mineração e análise de dados\n\n### Como instalar\n* Anaconda (http://pandas.pydata.org/pandas-docs/stable/install.html#installing-pandas-with-anaconda)\n * Download anaconda: https://www.continuum.io/downloads\n * Instalar Anaconda: https://docs.continuum.io/anaconda/install\n * Disponível para `osx-64`, `linux-64`, `linux-32`, `win-64`, `win-32` e `Python 2.7`, `Python 3.4`, e `Python 3.5`\n * `conda install scikit-learn`\n* Pip\n * `pip install -U scikit-learn`",
"_____no_output_____"
]
],
[
[
"from sklearn.tree import DecisionTreeClassifier\nfrom sklearn.metrics import confusion_matrix\nfrom sklearn.cross_validation import train_test_split\nimport pickle\nimport time\ntime1=time.strftime('%Y-%m-%d_%H-%M-%S')",
"_____no_output_____"
]
],
[
[
"### iris.csv",
"_____no_output_____"
],
[
"### Treinar modelo de Árvore de Decisão",
"_____no_output_____"
],
[
"### Salvar modelo",
"_____no_output_____"
]
],
[
[
"\n ",
"_____no_output_____"
]
],
[
[
"### Carregar modelo",
"_____no_output_____"
],
[
"### Predição para casos de teste",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
]
] |
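The tutorial record above deliberately leaves its answer cells empty (it is the "sem spoilers" version), so the following is only an illustration of the pandas calls its headings name — head, value_counts, boolean filtering and merge — on toy data, not a solution to the exercises. The real exercises use cast.csv, titles.csv and release_dates.csv.

import pandas as pd

titles = pd.DataFrame({'title': ['Sleuth', 'Hamlet', 'Sleuth'], 'year': [1972, 1996, 2007]})
cast = pd.DataFrame({'title': ['Sleuth', 'Sleuth'],
                     'name': ['Laurence Olivier', 'Michael Caine'],
                     'type': ['actor', 'actor'],
                     'n': [1, 2]})

print(titles.head(2))                                   # first n rows
print(titles['title'].value_counts())                   # counts per distinct value
print(titles[titles['year'] // 10 * 10 == 1970])        # boolean filtering (films of the 70s)
print(cast.merge(titles, on='title').sort_values('n'))  # merge two tables, order by rank n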
4a62fadefa199c3ed5aeba615e1a03f0b3202718
| 25,208 |
ipynb
|
Jupyter Notebook
|
generate.ipynb
|
ageller/UniviewKilonova
|
ba5cb6aad9ea8fa658212bac8cd379f77196d772
|
[
"MIT"
] | null | null | null |
generate.ipynb
|
ageller/UniviewKilonova
|
ba5cb6aad9ea8fa658212bac8cd379f77196d772
|
[
"MIT"
] | null | null | null |
generate.ipynb
|
ageller/UniviewKilonova
|
ba5cb6aad9ea8fa658212bac8cd379f77196d772
|
[
"MIT"
] | null | null | null | 35.908832 | 536 | 0.496469 |
[
[
[
"# Uniview module for LIGO event GW170817\n\n*Aaron Geller, 2018*\n\n### Imports and function definitions",
"_____no_output_____"
]
],
[
[
"#This directory contains all the data needed for the module. It should be in the same directory as the notebook\ndataFolder = \"data\" \nimport sys, os, shutil, errno, string, urllib\n\n\nsys.path.append(( os.path.abspath( os.path.join(os.path.realpath(\"__file__\"), os.pardir, os.pardir) )))\nimport uvmodlib.v1 as uvmod",
"_____no_output_____"
]
],
[
[
"### USES Conf Template ",
"_____no_output_____"
]
],
[
[
"Template = \"\"\"mesh\n{ \n data center ./modules/$folderName/center.raw #I get errors if I don't pass something to geometry??\n data GRB ./modules/$folderName/GRB.raw\n data grid ./modules/$folderName/grid512.obj\n data quad ./modules/$folderName/quad.3ds\n \n cullRadius $cr\n glslVersion 330\n \n propertyCollection \n { \n __objectName__\n { \n\n vec2f pfit 1.38504943e-05 3.73532968e-01\n vec1f m1 1.4\n vec1f m2 1.4\n \n \n vec1f NSrad 10 | public | desc \"NS radius \" | widget slider | range 0 100\n vec1f kilonovaRad 20000 | public | desc \"kilonova max radius \" | widget slider | range 0 1e5\n vec1f kilonovaMaxT 0.3 | public | desc \"time at max kilonova radius \" | widget slider | range 0 1\n vec1f GRBrad 3 | public | desc \"GRB point radius \" | widget slider | range 0 10\n vec1f GRBspeed 1000 | public | desc \"GRB speed \" | widget slider | range 0 1000\n vec1f GRBMaxT 1 | public | desc \"GRB fade time\" | widget slider | range 0 10\n vec1f coronaFac 3 | public | desc \"corona radius multiplier\" | widget slider | range 0 10\n\n #for GW membrane\n vec4f membraneColor .3 .3 .6 0.2 | public | desc \"GW mesh color\" \n #This chages both the display size and the domain in which the Ricci Scalar is evaluated (e.g. if the value is 100, the curvature is evaluated in the domain where -100 < x < 100, and similarly for y)\n vec1f gridScale 5000 | public| desc \"GW grid domain\" | widget slider | range 0 1e5\n # This factor controls the contrast between the peaks and valleys of the membrane. Larger values increase contrast.\n vec1f shadingFactor 5000 | public | desc \"contrast for GW membrane\" | widget slider | range 0 1e5\n #This is an overall amplitude factor\n vec1f amplitudeFactor 430000 | public | desc \"GW amplitude factor\" | widget slider | range 0 1e6\n #The Ricci Scalar Curvature is divergant at the origin. In order to draw attention to the propigation of gravitational waves from the binary system we have added an artifical function of the form exp[-r0^2/r^2] which dramatically attenuates results close to the origin. 
This parameter is the value of r0 in our attenuation function.\n vec1f centerAttenuationDistance 35 | public | desc \"GW amplitude factor\" | widget slider | range 0 1000\n vec1f GWAmpClamp 300 | public | desc \"GW max amplitude \" | widget slider | range 1 1000\n \n vec1f eventTime -0.1 | public | desc \"event time \" #| widget slider | range -30 30\n vec1f transitionLength 30 | public | desc \"transition length in seconds\" \n bool jump true | public | desc \"jump to time without transition\" \n }\n }\n \n ############# to hold the time information\n renderTexture\n {\n name stateTexture\n width 1\n height 1\n numTextures 1\n isPingPong true\n isPersistent true\n isFramePersistent true\n internalTextureFormat GL_RGB32F\n magnify GL_NEAREST\n minify GL_NEAREST\n }\n ############# set Transition State\n pass \n {\n useDataObject quad\n renderTarget\n {\n name stateTexture\n enableColorClear false\n }\n shader\n {\n type defaultMeshShader\n {\n vertexShader ./modules/$folderName/pass0.vs\n fragmentShader ./modules/$folderName/state.fs\n textureFBO stateTexture stateTexture\n stateManagerVar __objectName__.transitionLength transitionLength \n stateManagerVar __objectName__.jump jump \n stateManagerVar __objectName__.eventTime eventTime \n parameter2f timeRange -30 30\n }\n } \n }\n \n ############# gravitation waves\n pass \n {\n useDataObject grid\n\n shader\n {\n type defaultMeshShader\n {\n vertexShader ./modules/$folderName/binaryGW.vs\n fragmentShader ./modules/$folderName/binaryGW.fs\n textureFBO stateTexture stateTexture\n \n #stateManagerVar __objectName__.eventTime eventTime\n stateManagerVar __objectName__.pfit pfit\n stateManagerVar __objectName__.membraneColor fillColor\n stateManagerVar __objectName__.gridScale gridScale\n stateManagerVar __objectName__.shadingFactor shadingFactor\n stateManagerVar __objectName__.amplitudeFactor A\n stateManagerVar __objectName__.centerAttenuationDistance killFunctionDecay\n stateManagerVar __objectName__.GWAmpClamp GWAmpClamp\n\n glState\n {\n UV_BLEND_ENABLE true\n UV_DEPTH_ENABLE true\n UV_CULL_FACE_ENABLE false\n }\n }\n }\n }\n \n\n ############# NS 1 \"corona\" \n pass\n {\n useDataObject center\n shader\n {\n type defaultMeshShader\n {\n geometryShader ./modules/$folderName/corona.gs\n vertexShader ./modules/$folderName/NS.vs\n fragmentShader ./modules/$folderName/corona.fs\n textureFBO stateTexture stateTexture\n \n parameter1f starNum 1. \n \n #stateManagerVar __objectName__.eventTime eventTime\n stateManagerVar __objectName__.pfit pfit\n stateManagerVar __objectName__.NSrad NSrad\n stateManagerVar __objectName__.m1 m1\n stateManagerVar __objectName__.m2 m2\n stateManagerVar __objectName__.coronaFac coronaFac\n \n glState\n {\n UV_CULL_FACE_ENABLE false\n UV_BLEND_ENABLE true\n UV_DEPTH_ENABLE false\n UV_WRITE_MASK_DEPTH true\n UV_BLEND_FUNC GL_SRC_ALPHA GL_ONE_MINUS_SRC_ALPHA\n }\n }\n\n } \n }\n ############# NS 2 \"corona\" \n pass\n {\n useDataObject center\n shader\n {\n type defaultMeshShader\n {\n geometryShader ./modules/$folderName/corona.gs\n vertexShader ./modules/$folderName/NS.vs\n fragmentShader ./modules/$folderName/corona.fs\n textureFBO stateTexture stateTexture\n \n parameter1f starNum 2. 
\n \n #stateManagerVar __objectName__.eventTime eventTime\n stateManagerVar __objectName__.pfit pfit\n stateManagerVar __objectName__.NSrad NSrad\n stateManagerVar __objectName__.m1 m1\n stateManagerVar __objectName__.m2 m2\n stateManagerVar __objectName__.coronaFac coronaFac\n \n glState\n {\n UV_CULL_FACE_ENABLE false\n UV_BLEND_ENABLE true\n UV_DEPTH_ENABLE false\n UV_WRITE_MASK_DEPTH true\n UV_BLEND_FUNC GL_SRC_ALPHA GL_ONE_MINUS_SRC_ALPHA\n }\n }\n\n } \n }\n \n ############# NS 1\n pass\n {\n useDataObject center\n shader\n {\n type defaultMeshShader\n {\n geometryShader ./modules/$folderName/NS.gs\n vertexShader ./modules/$folderName/NS.vs\n fragmentShader ./modules/$folderName/NS.fs\n textureFBO stateTexture stateTexture\n \n parameter1f starNum 1. \n \n #stateManagerVar __objectName__.eventTime eventTime\n stateManagerVar __objectName__.pfit pfit\n stateManagerVar __objectName__.NSrad NSrad\n stateManagerVar __objectName__.m1 m1\n stateManagerVar __objectName__.m2 m2\n \n glState\n {\n UV_CULL_FACE_ENABLE false\n UV_BLEND_ENABLE true\n UV_DEPTH_ENABLE false\n UV_WRITE_MASK_DEPTH true\n UV_BLEND_FUNC GL_SRC_ALPHA GL_ONE_MINUS_SRC_ALPHA\n }\n }\n\n } \n }\n ############# NS 2\n pass\n {\n useDataObject center\n shader\n {\n type defaultMeshShader\n {\n geometryShader ./modules/$folderName/NS.gs\n vertexShader ./modules/$folderName/NS.vs\n fragmentShader ./modules/$folderName/NS.fs\n textureFBO stateTexture stateTexture\n \n parameter1f starNum 2. \n\n #stateManagerVar __objectName__.eventTime eventTime\n stateManagerVar __objectName__.pfit pfit\n stateManagerVar __objectName__.NSrad NSrad\n stateManagerVar __objectName__.m1 m1\n stateManagerVar __objectName__.m2 m2\n \n glState\n {\n UV_CULL_FACE_ENABLE false\n UV_BLEND_ENABLE true\n UV_DEPTH_ENABLE false\n UV_WRITE_MASK_DEPTH true\n UV_BLEND_FUNC GL_SRC_ALPHA GL_ONE_MINUS_SRC_ALPHA\n }\n }\n\n } \n }\n\n\n ############# GRB\n pass\n {\n useDataObject GRB\n shader\n {\n type defaultMeshShader\n {\n geometryShader ./modules/$folderName/GRB.gs\n vertexShader ./modules/$folderName/kilonova.vs\n fragmentShader ./modules/$folderName/GRB.fs\n textureFBO stateTexture stateTexture\n \n #stateManagerVar __objectName__.eventTime eventTime\n stateManagerVar __objectName__.GRBrad GRBrad\n stateManagerVar __objectName__.GRBspeed GRBspeed\n stateManagerVar __objectName__.GRBMaxT GRBMaxT\n \n glState\n {\n UV_CULL_FACE_ENABLE false\n UV_BLEND_ENABLE true\n UV_DEPTH_ENABLE false\n UV_WRITE_MASK_DEPTH true\n UV_BLEND_FUNC GL_SRC_ALPHA GL_ONE\n }\n }\n\n } \n }\n ############# kilonova\n pass\n {\n useDataObject center\n shader\n {\n type defaultMeshShader\n {\n geometryShader ./modules/$folderName/kilonova.gs\n vertexShader ./modules/$folderName/kilonova.vs\n fragmentShader ./modules/$folderName/kilonova.fs\n textureFBO stateTexture stateTexture\n texture cmap ./modules/$folderName/cmap.png\n { \n wrapModeS GL_CLAMP_TO_EDGE\n wrapModeR GL_CLAMP_TO_EDGE\n colorspace linear\n } \n #stateManagerVar __objectName__.eventTime eventTime\n stateManagerVar __objectName__.kilonovaRad kilonovaRad\n stateManagerVar __objectName__.kilonovaMaxT kilonovaMaxT\n \n glState\n {\n UV_CULL_FACE_ENABLE false\n UV_BLEND_ENABLE true\n UV_DEPTH_ENABLE false\n UV_WRITE_MASK_DEPTH true\n UV_BLEND_FUNC GL_SRC_ALPHA GL_ONE_MINUS_SRC_ALPHA\n }\n }\n\n } \n }\n\n}\"\"\"",
"_____no_output_____"
]
],
[
[
"### Kilonova class",
"_____no_output_____"
]
],
[
[
"class Kilonova():\n def __init__(self, object):\n self.object = object\n uvmod.Utility.ensurerelativepathexsists(\"kilonova.gs\",dataFolder)\n uvmod.Utility.ensurerelativepathexsists(\"kilonova.vs\",dataFolder)\n uvmod.Utility.ensurerelativepathexsists(\"kilonova.fs\",dataFolder)\n uvmod.Utility.ensurerelativepathexsists(\"NS.gs\",dataFolder)\n uvmod.Utility.ensurerelativepathexsists(\"NS.vs\",dataFolder)\n uvmod.Utility.ensurerelativepathexsists(\"NS.fs\",dataFolder)\n uvmod.Utility.ensurerelativepathexsists(\"corona.gs\",dataFolder)\n uvmod.Utility.ensurerelativepathexsists(\"corona.fs\",dataFolder) \n uvmod.Utility.ensurerelativepathexsists(\"binaryGW.vs\",dataFolder)\n uvmod.Utility.ensurerelativepathexsists(\"binaryGW.fs\",dataFolder)\n uvmod.Utility.ensurerelativepathexsists(\"GRB.gs\",dataFolder)\n uvmod.Utility.ensurerelativepathexsists(\"GRB.fs\",dataFolder)\n self.cr = 1000\n self.Scale = 1\n\n def generatemod(self):\n self.object.setgeometry(self.object.name+\"Mesh.usesconf\")\n return self.object.generatemod()\n def generatefiles(self, absOutDir, relOutDir):\n fileName = self.object.name+\"Mesh.usesconf\"\n s = string.Template(Template)\n f = open(absOutDir+\"\\\\\"+fileName, 'w')\n if f:\n f.write(s.substitute(folderName = relOutDir,\n cr = self.cr,\n Scale = self.Scale\n ))\n f.close()\n uvmod.Utility.copyfoldercontents(os.getcwd()+\"\\\\\"+dataFolder, absOutDir)",
"_____no_output_____"
]
],
[
[
"### Object Instantiation",
"_____no_output_____"
]
],
[
[
"model = Kilonova(uvmod.OrbitalObject())\nscene = uvmod.Scene()\nparentScene = uvmod.Scene()\nmodinfo = uvmod.ModuleInformation()\ngenerator = uvmod.Generator()",
"_____no_output_____"
]
],
[
[
"### Specify Settings and generate the module",
"_____no_output_____"
]
],
[
[
"scene.setname(\"Kilonova\")\nscene.setparent(\"MilkyWay\")\nscene.setunit(1000.0)\nscene.setentrydist(10000.)\nscene.setstaticposition(-35025580.45131495, -11010152.02509566, -15874043.79585574)\n\nmodel.object.setcameraradius(1.)\nmodel.object.setcoord(scene.name)\nmodel.object.setname(\"Kilonova\")\nmodel.object.setguiname(\"/KavliLecture/Larson/Kilonova\")\nmodel.object.settargetradius(20)\nmodel.object.showatstartup(False)\nmodel.cr = 10000\n\nmodinfo.setname(\"Kilonova\")\nmodinfo.setauthor(\"Aaron Geller<sup>1</sup> Jeffrey SubbaRao, and Shane Larson<sup>2</sup><br />(1)Adler Planetarium,<br />(2)Northwestern University\")\nmodinfo.cleardependencies()\nmodinfo.setdesc(\"Uniview module for LIGO event GW170817\")\n#modinfo.setthumbnail(\"data/R0010133.JPG\")\nmodinfo.setversion(\"1.0\")\n\ngenerator.generate(\"Kilonova\",[scene],[model],modinfo)\nuvmod.Utility.senduvcommand(model.object.name+\".reload\")",
"Unable to connect to Uniview\n"
]
],
[
[
"## Helper Functions for modifing code\n*Reload Module and Shaders in Uniview*",
"_____no_output_____"
]
],
[
[
"uvmod.Utility.senduvcommand(model.object.name+\".reload; system.reloadallshaders\")",
"Unable to connect to Uniview\n"
]
],
[
[
"*Copy modified Shader files and reload*",
"_____no_output_____"
]
],
[
[
"from config import Settings",
"_____no_output_____"
],
[
"uvmod.Utility.copyfoldercontents(os.getcwd()+\"\\\\\"+dataFolder, Settings.uvcustommodulelocation+'\\\\'+model.object.name)\nuvmod.Utility.senduvcommand(model.object.name+\".reload\")",
"_____no_output_____"
]
],
[
[
"### Create colormap texture",
"_____no_output_____"
]
],
[
[
"import numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib import cm\n\ngradient = np.linspace(0, 1, 256)\ngradient = np.vstack((gradient, gradient))\n\ndef plot_cmap(colormap):\n fig=plt.imshow(gradient, aspect=1, cmap=colormap)\n plt.axis('off')\n fig.axes.get_xaxis().set_visible(False)\n fig.axes.get_yaxis().set_visible(False)\n plt.savefig(\"data/cmap.png\", bbox_inches='tight',pad_inches=0)\n\nplot_cmap('hot_r')\n\nplt.show()",
"_____no_output_____"
]
],
[
[
"*Testing some fit numbers*",
"_____no_output_____"
]
],
[
[
"pfit = [1.38504943e-05, 3.73532968e-01]\nt = -0.1\nper = (-1.*pfit[0]*t)**pfit[1]; #seconds\nprint(per, 6./(per*86400))",
"0.006481031544404803 0.010715029539456128\n"
]
],
[
[
"## Kilonova position \n\n*From the [wikipedia page](https://en.wikipedia.org/wiki/GW170817)*",
"_____no_output_____"
]
],
[
[
"from astropy.coordinates import SkyCoord\nfrom astropy import units, constants",
"_____no_output_____"
],
[
"RA = \"13h 09m 48.08s\" #right ascension\nDec= \"−23d 22m 53.3s\" #declination\ndist = (40 *units.Mpc).to(units.pc) #distance\n\ncoord = SkyCoord(RA, Dec, dist)\n\nprint(coord.cartesian)",
"(-35025580.45131495, -11010152.02509566, -15874043.79585574) pc\n"
]
],
[
[
"*Check the semi-major axis at -0.1s*",
"_____no_output_____"
]
],
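[
[
"*Added note (a reading aid, not part of the original module):* the cell below inverts Kepler's third law, $a = \\left(\\frac{G(m_1+m_2)P^2}{4\\pi^2}\\right)^{1/3}$, using the fitted orbital period $P$ at $t=-0.1$ s and two $1.4\\,M_\\odot$ neutron stars, which is exactly what the astropy expression in the next cell computes.",
"_____no_output_____"
]
],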
[
[
"import numpy as np\n\na = 1.38504943e-05\nb = 3.73532968e-01 \nt = -0.1\np = (-a*t)**b * units.s\nsma = (p**2.*constants.G*(2.*1.4*units.solMass)/(4.*np.pi**2.))**(1./3.)\nprint(p, sma.to(units.km))",
"0.006481031544404803 s 73.3949309755613 km\n"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a62fc362f74b41dcc314a75f41263b64951f4b5
| 715,000 |
ipynb
|
Jupyter Notebook
|
Notebooks/ExperimentAnalysisTimesteps.ipynb
|
thesneakysneak/memory_capacity_retention_rnns
|
594d7814a2adba212f4d01a98797ee3994e19709
|
[
"MIT"
] | null | null | null |
Notebooks/ExperimentAnalysisTimesteps.ipynb
|
thesneakysneak/memory_capacity_retention_rnns
|
594d7814a2adba212f4d01a98797ee3994e19709
|
[
"MIT"
] | null | null | null |
Notebooks/ExperimentAnalysisTimesteps.ipynb
|
thesneakysneak/memory_capacity_retention_rnns
|
594d7814a2adba212f4d01a98797ee3994e19709
|
[
"MIT"
] | null | null | null | 534.379671 | 44,232 | 0.932124 |
[
[
[
"# Experiment Analysis\nIn this notebook we will evaluate the results form the experiments executed. For each experiment, one parameter is changed and all others were kept constant as to determine the effect of one variable. \n\n**The goals of this analysis are:**\n1. Determine the relationship of the number of parameters in the neural network and the number of timesteps in the dataset\n2. Determine what effect increasing the number patterns are w.r.t. this relationship\n3. Determine what effect sparsity has on the capacity of the neural networks\n4. Investigate which activation function lead to the highest retention of information\n5. What type of network is able to retain the most information ",
"_____no_output_____"
],
[
"To determine whether a relationshop exists between the variable being investigated and the number of required parameters in each respective neural network, the Pearson correlation coefficient is used. The domain of this metric lies between -1 and +1 or in mathematical notation $P \\in [-1, 1]$. If there exists a strong positive relationship between variables, the Pearson coefficient will approach +1 and for the negative case -1. ",
"_____no_output_____"
]
],
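[
[
"# Added illustrative sketch (not part of the original experiments): how the Pearson\n# coefficient reported throughout this notebook can be computed. The toy DataFrame\n# below is an assumption used only for illustration; the real analysis uses the\n# aggregated df_temp frames built from `df`.\nimport pandas as pd\nfrom scipy import stats\n\ntoy = pd.DataFrame({'timesteps': [5, 10, 15, 20],\n                    'num_network_parameters': [120, 118, 121, 119]})\n\n# pandas: Pearson is the default method of Series.corr\nprint(toy['timesteps'].corr(toy['num_network_parameters']))\n\n# scipy: returns the coefficient together with a two-sided p-value\nprint(stats.pearsonr(toy['timesteps'], toy['num_network_parameters']))",
"_____no_output_____"
]
],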
[
[
"import pandas as pd \nimport numpy as np\nimport scipy\nimport sklearn\nimport pandas as pd\nfrom sqlalchemy import Column, Integer, String\nfrom sqlalchemy import create_engine, Column\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy.orm import sessionmaker\nimport seaborn as sns\nimport matplotlib.pyplot as plt",
"_____no_output_____"
],
[
"# Before using a cluster\n# Base = declarative_base()\n# engine = create_engine('postgresql://masters_user:password@localhost:5432/masters_experiments')",
"_____no_output_____"
]
],
[
[
"# Timesteps Analysis",
"_____no_output_____"
]
],
[
[
"df = pd.read_csv(\"timesteps.csv\", delimiter=\",\")\n\ndf.head(5)",
"_____no_output_____"
]
],
[
[
"## Number of parameters ∝ time steps",
"_____no_output_____"
],
[
"### Overall",
"_____no_output_____"
]
],
[
[
"from matplotlib import pyplot\ndef plot_by_filter(x_col, \n y_col, \n x_label='Sparsity length',\n y_label='Number of network parameters',\n title=\"Effect of sparsity on the number of parameters \\n in a neural network with activation \", \n hue=\"network_type\", \n filter_col=\"activation_function\", \n filter_val=\"tanh\",\n legend_title=\"NN TYPE\",\n df=None):\n sns.set_style(\"whitegrid\")\n a4_dims = (6, 2.25)\n fig, ax = pyplot.subplots(figsize=a4_dims)\n ax.set(xlabel=x_label, \n ylabel=y_label )\n if filter_val is not None:\n ax = sns.pointplot(ax=ax, x=x_col, y=y_col, hue=hue, \n marker='o', markersize=5, ci=None,\n data = df[df[filter_col] == filter_val])\n \n ax.axes.set_title(title + filter_val,\n fontsize=12, y=1.05)\n ax.legend(title=filter_val.upper(), loc='center right', bbox_to_anchor=(1.37, 0.5), ncol=1)\n else:\n ax = sns.pointplot(ax=ax, x=x_col, y=y_col, hue=hue, \n marker='o', markersize=5, ci=None,\n data = df)\n ax.axes.set_title(title, fontsize=12, y=1.05)\n ax.legend(title=legend_title, loc='center right', bbox_to_anchor=(1.37, 0.5), ncol=1)\n\n # plt.legend()\nfilter_col = \"network_type\"\n \nplot_by_filter(x_col=\"timesteps\", \n y_col=\"num_network_parameters\",\n x_label='Timesteps',\n y_label='Number of network parameters',\n title=\"Effect of timesteps on the number of parameters \" + \n \"\\n in a neural network over all activation functions\", \n hue=\"network_type\",\n filter_col=filter_col, filter_val=None, df=df) \n\n\n \n",
"_____no_output_____"
],
[
"filter_col = \"network_type\"\nfor filter_val in df[filter_col].unique():\n df_temp = df[df[filter_col] == filter_val]\n df_temp = df_temp.groupby([\"timesteps\", \"network_type\"]).agg({\"num_network_parameters\": \"mean\"}).to_records()\n df_temp = pd.DataFrame.from_records(df_temp)\n df_temp[\"timesteps\"] = df_temp[\"timesteps\"].astype(float)\n df_temp[\"num_network_parameters\"] = df_temp[\"num_network_parameters\"].astype(float)\n \n print(\"Pearson Correlation Between Timesteps and Number of Network Parameters for\", filter_val, df_temp[\"timesteps\"].corr(df_temp[\"num_network_parameters\"]), type=\"spearman\")",
"_____no_output_____"
]
],
[
[
"### Ratio of required parameters for increase in time steps ",
"_____no_output_____"
]
],
[
[
"df_temp = df.groupby([\"timesteps\", \"network_type\"]).agg({\"num_network_parameters\": \"mean\"}).to_records()\ndf_temp = pd.DataFrame.from_records(df_temp)\ndf_temp.pivot(index=\"timesteps\", columns=\"network_type\", values=\"num_network_parameters\").head(11)",
"_____no_output_____"
]
],
[
[
"### Discussion of results\nFrom the Pearson coefficient, it is seems as if increasing the number of timesteps increases the number of required parameters for the Elman and GRU RNNs, while decreasing this requirement for the LSTM. However, upon inspecting the graph and values in the table, it is more apparent that this small correlation is due to variablity in the experiment during training. Thus it is safe to assume that there is no correlation between the number of network parameters required and the number of time steps if sparsity, number of patterns and output nodes are fixed for the **average case**.",
"_____no_output_____"
],
[
"### Effect of timesteps on networks with specific activation functions",
"_____no_output_____"
]
],
[
[
"filter_col = \"activation_function\"\nfor filter_val in df[filter_col].unique():\n df_temp = df[(df[filter_col] == filter_val)]\n df_temp = df_temp.groupby([\"timesteps\"]).agg({\"num_network_parameters\": \"mean\"}).to_records()\n df_temp = pd.DataFrame.from_records(df_temp)\n df_temp[\"timesteps\"] = df_temp[\"timesteps\"].astype(float)\n df_temp[\"num_network_parameters\"] = df_temp[\"num_network_parameters\"].astype(float)\n \n print(\"Pearson Correlation Between Timesteps and Number of Network Parameters for\", filter_val, df_temp[\"timesteps\"].corr(df_temp[\"num_network_parameters\"]))",
"Pearson Correlation Between Timesteps and Number of Network Parameters for softmax -0.28069178610689477\nPearson Correlation Between Timesteps and Number of Network Parameters for elu -0.03581183697143837\nPearson Correlation Between Timesteps and Number of Network Parameters for selu 0.5240563981875065\nPearson Correlation Between Timesteps and Number of Network Parameters for softplus 0.3038218101251\nPearson Correlation Between Timesteps and Number of Network Parameters for softsign 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for tanh 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for sigmoid 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for hard_sigmoid 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for relu 0.042276570772055384\nPearson Correlation Between Timesteps and Number of Network Parameters for linear -0.2818175694645072\n"
],
[
"df_temp = df.groupby([\"timesteps\", \"activation_function\"]).agg({\"num_network_parameters\": \"mean\"}).to_records()\ndf_temp = pd.DataFrame.from_records(df_temp)\ndf_temp.pivot(index=\"timesteps\", columns=\"activation_function\", values=\"num_network_parameters\").head(11)",
"_____no_output_____"
]
],
[
[
"### Discussion of activation functions ∝ time steps\nThe correlation coefficient between the required network parameters required and the increase in time steps for respective activation functions indicate that for **most activation functions**, increasing time steps will not have an effect on the required parameters of the network. \n\nInterestingly enough, this is not the case for the **selu** and **softplus**. For networks using these activation functions, the amount of memory loss is effected by the increase in timesteps.\n\nThe **softmax** and **linear** activation functions seem to cope the best with the increase in timesteps and the **relu** activation function has the highest variance. The high variance of the **relu** function lends itself to be usefull in avoiding local optima. ",
"_____no_output_____"
]
],
[
[
"filter_col = \"activation_function\"\nfor filter_val in df[filter_col].unique():\n for filter_val_1 in df[\"network_type\"].unique():\n df_temp = df[df[\"network_type\"] == filter_val_1]\n df_temp = df[df[filter_col] == filter_val]\n df_temp = df_temp.groupby([\"timesteps\"]).agg({\"num_network_parameters\": \"mean\"}).to_records()\n df_temp = pd.DataFrame.from_records(df_temp)\n df_temp[\"timesteps\"] = df_temp[\"timesteps\"].astype(float)\n df_temp[\"num_network_parameters\"] = df_temp[\"num_network_parameters\"].astype(float)\n\n print(\"Pearson Correlation Between Timesteps and Number of Network Parameters for\", filter_val_1 + \" \"+ filter_val, df_temp[\"timesteps\"].corr(df_temp[\"num_network_parameters\"]))",
"Pearson Correlation Between Timesteps and Number of Network Parameters for lstm softmax -0.28069178610689477\nPearson Correlation Between Timesteps and Number of Network Parameters for gru softmax -0.28069178610689477\nPearson Correlation Between Timesteps and Number of Network Parameters for elman_rnn softmax -0.28069178610689477\nPearson Correlation Between Timesteps and Number of Network Parameters for lstm elu -0.03581183697143837\nPearson Correlation Between Timesteps and Number of Network Parameters for gru elu -0.03581183697143837\nPearson Correlation Between Timesteps and Number of Network Parameters for elman_rnn elu -0.03581183697143837\nPearson Correlation Between Timesteps and Number of Network Parameters for lstm selu 0.5240563981875065\nPearson Correlation Between Timesteps and Number of Network Parameters for gru selu 0.5240563981875065\nPearson Correlation Between Timesteps and Number of Network Parameters for elman_rnn selu 0.5240563981875065\nPearson Correlation Between Timesteps and Number of Network Parameters for lstm softplus 0.3038218101251\nPearson Correlation Between Timesteps and Number of Network Parameters for gru softplus 0.3038218101251\nPearson Correlation Between Timesteps and Number of Network Parameters for elman_rnn softplus 0.3038218101251\nPearson Correlation Between Timesteps and Number of Network Parameters for lstm softsign 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for gru softsign 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for elman_rnn softsign 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for lstm tanh 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for gru tanh 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for elman_rnn tanh 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for lstm sigmoid 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for gru sigmoid 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for elman_rnn sigmoid 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for lstm hard_sigmoid 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for gru hard_sigmoid 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for elman_rnn hard_sigmoid 0.0\nPearson Correlation Between Timesteps and Number of Network Parameters for lstm relu 0.042276570772055384\nPearson Correlation Between Timesteps and Number of Network Parameters for gru relu 0.042276570772055384\nPearson Correlation Between Timesteps and Number of Network Parameters for elman_rnn relu 0.042276570772055384\nPearson Correlation Between Timesteps and Number of Network Parameters for lstm linear -0.2818175694645072\nPearson Correlation Between Timesteps and Number of Network Parameters for gru linear -0.2818175694645072\nPearson Correlation Between Timesteps and Number of Network Parameters for elman_rnn linear -0.2818175694645072\n"
],
[
"filter_col = \"activation_function\"\nfor filter_val in df[filter_col].unique():\n plot_by_filter(x_col=\"timesteps\", \n y_col=\"num_network_parameters\",\n x_label='Timesteps',\n y_label='Number of network parameters',\n title=\"Effect of timesteps on the number of parameters \" + \n \"\\n in a neural network with activation \" +str(filter_val), \n hue=\"network_type\",\n filter_col=filter_col, filter_val=filter_val, df=df) \n ",
"_____no_output_____"
]
],
[
[
"Comparing the correlation between the type of neural network and activation function it is clear that assumptions made about activation functions hold for all recurrent neural networks.",
"_____no_output_____"
],
[
"### Effect of time steps on training time ",
"_____no_output_____"
]
],
[
[
"filter_col = \"network_type\"\n \nplot_by_filter(x_col=\"timesteps\", \n y_col=\"epocs\",\n x_label='Timesteps',\n y_label='Number of EPOCS required to train network',\n title=\"Effect of timesteps on training time \", \n hue=\"network_type\",\n filter_col=filter_col, filter_val=None, df=df) ",
"_____no_output_____"
]
],
[
[
"### Effect of time steps on training time ",
"_____no_output_____"
]
],
[
[
"filter_col = \"activation_function\"\nfor filter_val in df[filter_col].unique():\n plot_by_filter(x_col=\"timesteps\", \n y_col=\"epocs\",\n x_label='Timesteps',\n y_label='Number of EPOCS required to train network',\n title=\"Effect of timesteps on training time \" +\n \"\\n for a neural network with activation \" +str(filter_val), \n hue=\"network_type\",\n filter_col=filter_col, filter_val=filter_val, df=df) \n ",
"_____no_output_____"
]
],
[
[
"### Conclusion about capacity?",
"_____no_output_____"
],
[
"Increasing the number of time steps does not have a direct effect on the performance of RNN's when all other parameters are kept constant. It is important to note, increasing time steps can dramatically effect size of the search space. Increasing the number of timesteps will exponentially increase the search space if all possible patterns in that search space is explored. During the execution of these experiments, all 46 GB of memory would be utilised. For an input space of $2$ binary inputs and $15$ time steps, the total number of possible patterns become $(2^2)^{15} = 1073741824$. ",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
]
] |
4a63033d684e2826223c5a976634e59ca8822e25
| 459,602 |
ipynb
|
Jupyter Notebook
|
-Predict-the-Insurance-Claim-using-Logistic-Regression/logistic_regression_student_template.ipynb
|
charmichaniyara/ga-learner-dsmp-repo
|
b807ae5e9373e8b95426ff888e6ca6f6b3bbe2c1
|
[
"MIT"
] | null | null | null |
-Predict-the-Insurance-Claim-using-Logistic-Regression/logistic_regression_student_template.ipynb
|
charmichaniyara/ga-learner-dsmp-repo
|
b807ae5e9373e8b95426ff888e6ca6f6b3bbe2c1
|
[
"MIT"
] | null | null | null |
-Predict-the-Insurance-Claim-using-Logistic-Regression/logistic_regression_student_template.ipynb
|
charmichaniyara/ga-learner-dsmp-repo
|
b807ae5e9373e8b95426ff888e6ca6f6b3bbe2c1
|
[
"MIT"
] | null | null | null | 391.150638 | 369,416 | 0.923971 |
[
[
[
"import numpy as np\nimport pandas as pd\nimport seaborn as sns\nimport matplotlib.pyplot as plt\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.model_selection import GridSearchCV, RandomizedSearchCV\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.metrics import accuracy_score\nfrom sklearn.metrics import roc_auc_score\nfrom sklearn import metrics\n\nimport warnings\nwarnings.filterwarnings('ignore')",
"_____no_output_____"
]
],
[
[
"### Load the dataset\n\n- Load the train data and using all your knowledge of pandas try to explore the different statistical properties of the dataset.",
"_____no_output_____"
]
],
[
[
"# Code starts here\n\n\ntrain = pd.read_csv('E:/GreyAtom/glab proj/Predict the Insurance Claim using Logistic Regression/train.csv')\ntrain.head()\n",
"_____no_output_____"
],
[
"train.shape",
"_____no_output_____"
],
[
"train.info()",
"<class 'pandas.core.frame.DataFrame'>\nRangeIndex: 1070 entries, 0 to 1069\nData columns (total 9 columns):\n # Column Non-Null Count Dtype \n--- ------ -------------- ----- \n 0 Id 1070 non-null int64 \n 1 age 1070 non-null int64 \n 2 sex 1070 non-null int64 \n 3 bmi 1070 non-null float64\n 4 children 1070 non-null int64 \n 5 smoker 1070 non-null int64 \n 6 region 1070 non-null int64 \n 7 charges 1070 non-null float64\n 8 insuranceclaim 1070 non-null int64 \ndtypes: float64(2), int64(7)\nmemory usage: 75.3 KB\n"
],
[
"train.describe()",
"_____no_output_____"
],
[
"#Drop ID\ntrain.drop('Id', axis=1, inplace=True)\ntrain.head()",
"_____no_output_____"
],
[
"#Checking for distribution of target\ntrain['insuranceclaim'].value_counts().plot(kind='bar')",
"_____no_output_____"
],
[
"#Checking for skewness for features\ntrain.skew()",
"_____no_output_____"
]
],
[
[
"### EDA & Data Preprocessing\n\n- Check for the categorical & continuous features. \n- Check out the best plots for plotting between categorical target and continuous features and try making some inferences from these plots.",
"_____no_output_____"
]
],
[
[
"# Code starts here\n\nplt.boxplot(train['bmi'])",
"_____no_output_____"
],
[
"train['insuranceclaim'].value_counts(normalize=True)",
"_____no_output_____"
],
[
"train.corr()",
"_____no_output_____"
],
[
"sns.pairplot(train)",
"_____no_output_____"
],
[
"#Check count_plot for different features vs target variable insuranceclaim\n# This will tell us which features are highly correlated with the target variable insuranceclaim and help us predict it better.\n\ncols = ['children', 'sex', 'region', 'smoker']\n\nfig,axes = plt.subplots(nrows=2, ncols=2, figsize=(20,20))\n\nfor i in range(0,2):\n for j in range(0,2):\n col = cols[i*2 + j]\n sns.countplot(x=train[col], hue=train['insuranceclaim'], ax=axes[i,j])",
"_____no_output_____"
]
],
[
[
"### Model building\n\n- Separate the features and target and then split the train data into train and validation set.\n- Now let's come to the actual task, using logistic regression, predict the insuranceclaim. Select the best model by cross-validation using Grid Search.\n- Try improving upon the `roc_auc_score` using different parameters for Grid Search that give the best score.\n\n",
"_____no_output_____"
]
],
[
[
"# Code starts here\n\n#Store independent and dependent variable\nX = train.drop('insuranceclaim', axis=1)\ny=train['insuranceclaim']\n",
"_____no_output_____"
],
[
"#Split dataset\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=6)",
"_____no_output_____"
],
[
"#Instantiate Logistic regression model\nlr = LogisticRegression(random_state=9)",
"_____no_output_____"
],
[
"#Grid search on Logistic regression\n\nlr.fit(X_train, y_train)",
"_____no_output_____"
],
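[
"# Added sketch (one possible approach, not part of the original submission): the\n# exercise asks for model selection with Grid Search, while the cell above only\n# fits a plain LogisticRegression. GridSearchCV is already imported at the top of\n# this notebook; the parameter grid below is illustrative only.\nparam_grid = {'C': [0.01, 0.1, 1, 10, 100]}\ngrid = GridSearchCV(LogisticRegression(random_state=9),\n                    param_grid, scoring='roc_auc', cv=5)\ngrid.fit(X_train, y_train)\nprint('Best params :', grid.best_params_)\nprint('Best CV AUC :', grid.best_score_)",
"_____no_output_____"
],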
[
"#make predictions\ny_pred = lr.predict(X_test)\naccuracy = accuracy_score(y_pred, y_test)\nprint('Accuracy is : ',accuracy)",
"Accuracy is : 0.7850467289719626\n"
],
[
"score = roc_auc_score(y_test, y_pred)\nprint('Score is : ', score)",
"Score is : 0.7825268817204301\n"
],
[
"#visualize performance of a binary classifier.\n\n\nfpr, tpr, _ = metrics.roc_curve(y_test, y_pred)\nroc_auc = roc_auc_score(y_test, y_pred)\nprint(roc_auc)\nplt.plot(fpr,tpr,label=\"Logistic model, auc=\"+str(roc_auc))\nplt.legend(loc=4)\nplt.show()",
"0.7825268817204301\n"
]
],
[
[
"### Prediction on the test data and creating the sample submission file.\n\n- Load the test data and store the `Id` column in a separate variable.\n- Perform the same operations on the test data that you have performed on the train data.\n- Create the submission file as a `csv` file consisting of the `Id` column from the test data and your prediction as the second column.",
"_____no_output_____"
]
],
[
[
"# Code starts here\n\ntest = pd.read_csv('E:/GreyAtom/glab proj/Predict the Insurance Claim using Logistic Regression/test.csv')\ntest.head()\n",
"_____no_output_____"
],
[
"id_ = test['Id']\n\n# Applying same transformation on test\ntest.drop('Id',axis=1,inplace=True)\n\n# make predictions \ny_pred_test =lr.predict(test)\n\n# Create a sample submission file\nsample_submission = pd.DataFrame({'Id':id_,'insuranceclaim':y_pred_test})\nsample_submission.head()\n",
"_____no_output_____"
],
[
"# Convert the sample submission file into a csv file\nsample_submission.to_csv('Sample_submission_1.csv',index=False)",
"_____no_output_____"
]
]
] |
[
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
]
] |
4a6307135b5a681ebec71875495dfba749d3b440
| 26,355 |
ipynb
|
Jupyter Notebook
|
notebooks/building_production_ml_systems/labs/.ipynb_checkpoints/2_hyperparameter_tuning-checkpoint.ipynb
|
jfesteban/Google-ASL
|
8e991a437e348b1950cdc351dba39e2d40a6b08f
|
[
"Apache-2.0"
] | null | null | null |
notebooks/building_production_ml_systems/labs/.ipynb_checkpoints/2_hyperparameter_tuning-checkpoint.ipynb
|
jfesteban/Google-ASL
|
8e991a437e348b1950cdc351dba39e2d40a6b08f
|
[
"Apache-2.0"
] | null | null | null |
notebooks/building_production_ml_systems/labs/.ipynb_checkpoints/2_hyperparameter_tuning-checkpoint.ipynb
|
jfesteban/Google-ASL
|
8e991a437e348b1950cdc351dba39e2d40a6b08f
|
[
"Apache-2.0"
] | null | null | null | 38.474453 | 553 | 0.579435 |
[
[
[
"# Hyper-parameter tuning\n\n**Learning Objectives**\n1. Learn how to use `cloudml-hypertune` to report the results for Cloud hyperparameter tuning trial runs\n2. Learn how to configure the `.yaml` file for submitting a Cloud hyperparameter tuning job\n3. Submit a hyperparameter tuning job to Cloud AI Platform\n\n## Introduction\n\nLet's see if we can improve upon that by tuning our hyperparameters.\n\nHyperparameters are parameters that are set *prior* to training a model, as opposed to parameters which are learned *during* training. \n\nThese include learning rate and batch size, but also model design parameters such as type of activation function and number of hidden units.\n\nHere are the four most common ways to finding the ideal hyperparameters:\n1. Manual\n2. Grid Search\n3. Random Search\n4. Bayesian Optimzation\n\n**1. Manual**\n\nTraditionally, hyperparameter tuning is a manual trial and error process. A data scientist has some intution about suitable hyperparameters which they use as a starting point, then they observe the result and use that information to try a new set of hyperparameters to try to beat the existing performance. \n\nPros\n- Educational, builds up your intuition as a data scientist\n- Inexpensive because only one trial is conducted at a time\n\nCons\n- Requires alot of time and patience\n\n**2. Grid Search**\n\nOn the other extreme we can use grid search. Define a discrete set of values to try for each hyperparameter then try every possible combination. \n\nPros\n- Can run hundreds of trials in parallel using the cloud\n- Gauranteed to find the best solution within the search space\n\nCons\n- Expensive\n\n**3. Random Search**\n\nAlternatively define a range for each hyperparamter (e.g. 0-256) and sample uniformly at random from that range. \n\nPros\n- Can run hundreds of trials in parallel using the cloud\n- Requires less trials than Grid Search to find a good solution\n\nCons\n- Expensive (but less so than Grid Search)\n\n**4. Bayesian Optimization**\n\nUnlike Grid Search and Random Search, Bayesian Optimization takes into account information from past trials to select parameters for future trials. The details of how this is done is beyond the scope of this notebook, but if you're interested you can read how it works here [here](https://cloud.google.com/blog/products/gcp/hyperparameter-tuning-cloud-machine-learning-engine-using-bayesian-optimization). \n\nPros\n- Picks values intelligenty based on results from past trials\n- Less expensive because requires fewer trials to get a good result\n\nCons\n- Requires sequential trials for best results, takes longer\n\n**AI Platform HyperTune**\n\nAI Platform HyperTune, powered by [Google Vizier](https://ai.google/research/pubs/pub46180), uses Bayesian Optimization by default, but [also supports](https://cloud.google.com/ml-engine/docs/tensorflow/hyperparameter-tuning-overview#search_algorithms) Grid Search and Random Search. \n\n\nWhen tuning just a few hyperparameters (say less than 4), Grid Search and Random Search work well, but when tunining several hyperparameters and the search space is large Bayesian Optimization is best.",
"_____no_output_____"
]
],
[
[
"PROJECT = \"<YOUR PROJECT>\"\nBUCKET = \"<YOUR BUCKET>\"\nREGION = \"<YOUR REGION>\"\nTFVERSION = \"2.1\" # TF version for AI Platform to use",
"_____no_output_____"
],
[
"import os \nos.environ[\"PROJECT\"] = PROJECT\nos.environ[\"BUCKET\"] = BUCKET\nos.environ[\"REGION\"] = REGION\nos.environ[\"TFVERSION\"] = TFVERSION ",
"_____no_output_____"
]
],
[
[
"## Make code compatible with AI Platform Training Service\nIn order to make our code compatible with AI Platform Training Service we need to make the following changes:\n\n1. Upload data to Google Cloud Storage \n2. Move code into a trainer Python package\n4. Submit training job with `gcloud` to train on AI Platform",
"_____no_output_____"
],
[
"### Upload data to Google Cloud Storage (GCS)\n\nCloud services don't have access to our local files, so we need to upload them to a location the Cloud servers can read from. In this case we'll use GCS.\n\nTo do this run the notebook [0_export_data_from_bq_to_gcs.ipynb](./0_export_data_from_bq_to_gcs.ipynb), which will export the taxifare data from BigQuery directly into a GCS bucket. If all ran smoothly, you should be able to list the data bucket by running the following command:",
"_____no_output_____"
]
],
[
[
"!gsutil ls gs://$BUCKET/taxifare/data",
"_____no_output_____"
]
],
[
[
"## Move code into python package\n\nIn the [previous lab](https://github.com/GoogleCloudPlatform/training-data-analyst/blob/master/courses/machine_learning/deepdive2/building_production_ml_systems/labs/1_training_at_scale.ipynb), we moved our code into a python package for training on Cloud AI Platform. Let's just check that the files are there. You should see the following files in the `taxifare/trainer` directory:\n - `__init__.py`\n - `model.py`\n - `task.py`",
"_____no_output_____"
]
],
[
[
"!ls -la taxifare/trainer",
"_____no_output_____"
]
],
[
[
"To use hyperparameter tuning in your training job you must perform the following steps:\n\n 1. Specify the hyperparameter tuning configuration for your training job by including a HyperparameterSpec in your TrainingInput object.\n\n 2. Include the following code in your training application:\n\n - Parse the command-line arguments representing the hyperparameters you want to tune, and use the values to set the hyperparameters for your training trial.\nAdd your hyperparameter metric to the summary for your graph.\n\n - To submit a hyperparameter tuning job, we must modify `model.py` and `task.py` to expose any variables we want to tune as command line arguments.",
"_____no_output_____"
],
[
"### Modify model.py",
"_____no_output_____"
],
[
"## Exercise.\n\nComplete the TODOs in the `train_and_evaluate` functin below. \n\n - Define the hyperparameter tuning metric `hp_metric`\n - Set up cloudml-hypertune to report the results of each trial by calling its helper function, `report_hyperparameter_tuning_metric`",
"_____no_output_____"
]
],
[
[
"%%writefile ./taxifare/trainer/model.py\nimport datetime\nimport hypertune\nimport logging\nimport os\nimport shutil\n\nimport numpy as np\nimport tensorflow as tf\n\nfrom tensorflow.keras import activations\nfrom tensorflow.keras import callbacks\nfrom tensorflow.keras import layers\nfrom tensorflow.keras import models\n\nfrom tensorflow import feature_column as fc\n\nlogging.info(tf.version.VERSION)\n\n\nCSV_COLUMNS = [\n 'fare_amount',\n 'pickup_datetime',\n 'pickup_longitude',\n 'pickup_latitude',\n 'dropoff_longitude',\n 'dropoff_latitude',\n 'passenger_count',\n 'key',\n]\nLABEL_COLUMN = 'fare_amount'\nDEFAULTS = [[0.0], ['na'], [0.0], [0.0], [0.0], [0.0], [0.0], ['na']]\nDAYS = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']\n\n\ndef features_and_labels(row_data):\n for unwanted_col in ['key']:\n row_data.pop(unwanted_col)\n label = row_data.pop(LABEL_COLUMN)\n return row_data, label\n\n\ndef load_dataset(pattern, batch_size, num_repeat):\n dataset = tf.data.experimental.make_csv_dataset(\n file_pattern=pattern,\n batch_size=batch_size,\n column_names=CSV_COLUMNS,\n column_defaults=DEFAULTS,\n num_epochs=num_repeat,\n )\n return dataset.map(features_and_labels)\n\n\ndef create_train_dataset(pattern, batch_size):\n dataset = load_dataset(pattern, batch_size, num_repeat=None)\n return dataset.prefetch(1)\n\n\ndef create_eval_dataset(pattern, batch_size):\n dataset = load_dataset(pattern, batch_size, num_repeat=1)\n return dataset.prefetch(1)\n\n\ndef parse_datetime(s):\n if type(s) is not str:\n s = s.numpy().decode('utf-8')\n return datetime.datetime.strptime(s, \"%Y-%m-%d %H:%M:%S %Z\")\n\n\ndef euclidean(params):\n lon1, lat1, lon2, lat2 = params\n londiff = lon2 - lon1\n latdiff = lat2 - lat1\n return tf.sqrt(londiff*londiff + latdiff*latdiff)\n\n\ndef get_dayofweek(s):\n ts = parse_datetime(s)\n return DAYS[ts.weekday()]\n\n\[email protected]\ndef dayofweek(ts_in):\n return tf.map_fn(\n lambda s: tf.py_function(get_dayofweek, inp=[s], Tout=tf.string),\n ts_in\n )\n\n\[email protected]\ndef fare_thresh(x):\n return 60 * activations.relu(x)\n\n\ndef transform(inputs, NUMERIC_COLS, STRING_COLS, nbuckets):\n # Pass-through columns\n transformed = inputs.copy()\n del transformed['pickup_datetime']\n\n feature_columns = {\n colname: fc.numeric_column(colname)\n for colname in NUMERIC_COLS\n }\n\n # Scaling longitude from range [-70, -78] to [0, 1]\n for lon_col in ['pickup_longitude', 'dropoff_longitude']:\n transformed[lon_col] = layers.Lambda(\n lambda x: (x + 78)/8.0,\n name='scale_{}'.format(lon_col)\n )(inputs[lon_col])\n\n # Scaling latitude from range [37, 45] to [0, 1]\n for lat_col in ['pickup_latitude', 'dropoff_latitude']:\n transformed[lat_col] = layers.Lambda(\n lambda x: (x - 37)/8.0,\n name='scale_{}'.format(lat_col)\n )(inputs[lat_col])\n\n # Adding Euclidean dist (no need to be accurate: NN will calibrate it)\n transformed['euclidean'] = layers.Lambda(euclidean, name='euclidean')([\n inputs['pickup_longitude'],\n inputs['pickup_latitude'],\n inputs['dropoff_longitude'],\n inputs['dropoff_latitude']\n ])\n feature_columns['euclidean'] = fc.numeric_column('euclidean')\n\n # hour of day from timestamp of form '2010-02-08 09:17:00+00:00'\n transformed['hourofday'] = layers.Lambda(\n lambda x: tf.strings.to_number(\n tf.strings.substr(x, 11, 2), out_type=tf.dtypes.int32),\n name='hourofday'\n )(inputs['pickup_datetime'])\n feature_columns['hourofday'] = fc.indicator_column(\n fc.categorical_column_with_identity(\n 'hourofday', num_buckets=24))\n\n latbuckets = 
np.linspace(0, 1, nbuckets).tolist()\n lonbuckets = np.linspace(0, 1, nbuckets).tolist()\n b_plat = fc.bucketized_column(\n feature_columns['pickup_latitude'], latbuckets)\n b_dlat = fc.bucketized_column(\n feature_columns['dropoff_latitude'], latbuckets)\n b_plon = fc.bucketized_column(\n feature_columns['pickup_longitude'], lonbuckets)\n b_dlon = fc.bucketized_column(\n feature_columns['dropoff_longitude'], lonbuckets)\n ploc = fc.crossed_column(\n [b_plat, b_plon], nbuckets * nbuckets)\n dloc = fc.crossed_column(\n [b_dlat, b_dlon], nbuckets * nbuckets)\n pd_pair = fc.crossed_column([ploc, dloc], nbuckets ** 4)\n feature_columns['pickup_and_dropoff'] = fc.embedding_column(\n pd_pair, 100)\n\n return transformed, feature_columns\n\n\ndef rmse(y_true, y_pred):\n return tf.sqrt(tf.reduce_mean(tf.square(y_pred - y_true)))\n\n\ndef build_dnn_model(nbuckets, nnsize, lr):\n # input layer is all float except for pickup_datetime which is a string\n STRING_COLS = ['pickup_datetime']\n NUMERIC_COLS = (\n set(CSV_COLUMNS) - set([LABEL_COLUMN, 'key']) - set(STRING_COLS)\n )\n inputs = {\n colname: layers.Input(name=colname, shape=(), dtype='float32')\n for colname in NUMERIC_COLS\n }\n inputs.update({\n colname: layers.Input(name=colname, shape=(), dtype='string')\n for colname in STRING_COLS\n })\n\n # transforms\n transformed, feature_columns = transform(\n inputs, NUMERIC_COLS, STRING_COLS, nbuckets=nbuckets)\n dnn_inputs = layers.DenseFeatures(feature_columns.values())(transformed)\n\n x = dnn_inputs\n for layer, nodes in enumerate(nnsize):\n x = layers.Dense(nodes, activation='relu', name='h{}'.format(layer))(x)\n output = layers.Dense(1, name='fare')(x)\n \n model = models.Model(inputs, output)\n lr_optimizer = tf.keras.optimizers.Adam(learning_rate=lr)\n model.compile(optimizer=lr_optimizer, loss='mse', metrics=[rmse, 'mse'])\n \n return model\n\n\ndef train_and_evaluate(hparams):\n batch_size = hparams['batch_size']\n eval_data_path = hparams['eval_data_path']\n nnsize = hparams['nnsize']\n nbuckets = hparams['nbuckets']\n lr = hparams['lr']\n num_evals = hparams['num_evals']\n num_examples_to_train_on = hparams['num_examples_to_train_on']\n output_dir = hparams['output_dir']\n train_data_path = hparams['train_data_path']\n\n if tf.io.gfile.exists(output_dir):\n tf.io.gfile.rmtree(output_dir)\n\n timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')\n savedmodel_dir = os.path.join(output_dir, 'savedmodel')\n model_export_path = os.path.join(savedmodel_dir, timestamp)\n checkpoint_path = os.path.join(output_dir, 'checkpoints')\n tensorboard_path = os.path.join(output_dir, 'tensorboard')\n \n dnn_model = build_dnn_model(nbuckets, nnsize, lr)\n logging.info(dnn_model.summary())\n\n trainds = create_train_dataset(train_data_path, batch_size)\n evalds = create_eval_dataset(eval_data_path, batch_size)\n\n steps_per_epoch = num_examples_to_train_on // (batch_size * num_evals)\n\n checkpoint_cb = callbacks.ModelCheckpoint(checkpoint_path,\n save_weights_only=True,\n verbose=1)\n\n tensorboard_cb = callbacks.TensorBoard(tensorboard_path,\n histogram_freq=1)\n\n history = dnn_model.fit(\n trainds,\n validation_data=evalds,\n epochs=num_evals,\n steps_per_epoch=max(1, steps_per_epoch),\n verbose=2, # 0=silent, 1=progress bar, 2=one line per epoch\n callbacks=[checkpoint_cb, tensorboard_cb]\n )\n\n # Exporting the model with default serving function.\n tf.saved_model.save(dnn_model, model_export_path)\n \n # TODO 1\n hp_metric = # TODO: Your code goes here\n \n # TODO 1\n hpt = # TODO: Your code 
goes here\n # TODO: Your code goes here\n\n return history\n",
"_____no_output_____"
]
],
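[
[
"*Added note (hedged, one possible completion of the TODOs above, not the official lab solution):* the trial result can be reported with the `cloudml-hypertune` helper. The snippet below would replace the TODO lines inside `train_and_evaluate`, after the call to `fit()`; it assumes the `history` object and `num_evals` defined there, and `'val_rmse'` is available because the model is compiled with the custom `rmse` metric.\n\n```python\nhp_metric = history.history['val_rmse'][-1]\n\nhpt = hypertune.HyperTune()\nhpt.report_hyperparameter_tuning_metric(\n    hyperparameter_metric_tag='rmse',\n    metric_value=hp_metric,\n    global_step=num_evals)\n```",
"_____no_output_____"
]
],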
[
[
"### Modify task.py",
"_____no_output_____"
]
],
[
[
"%%writefile taxifare/trainer/task.py\nimport argparse\nimport json\nimport os\n\nfrom trainer import model\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument(\n \"--batch_size\",\n help = \"Batch size for training steps\",\n type = int,\n default = 32\n )\n parser.add_argument(\n \"--eval_data_path\",\n help = \"GCS location pattern of eval files\",\n required = True\n )\n parser.add_argument(\n \"--nnsize\",\n help = \"Hidden layer sizes (provide space-separated sizes)\",\n nargs = \"+\",\n type = int,\n default=[32, 8]\n )\n parser.add_argument(\n \"--nbuckets\",\n help = \"Number of buckets to divide lat and lon with\",\n type = int,\n default = 10\n )\n parser.add_argument(\n \"--lr\",\n help = \"learning rate for optimizer\",\n type = float,\n default = 0.001\n )\n parser.add_argument(\n \"--num_evals\",\n help = \"Number of times to evaluate model on eval data training.\",\n type = int,\n default = 5\n )\n parser.add_argument(\n \"--num_examples_to_train_on\",\n help = \"Number of examples to train on.\",\n type = int,\n default = 100\n )\n parser.add_argument(\n \"--output_dir\",\n help = \"GCS location to write checkpoints and export models\",\n required = True\n )\n parser.add_argument(\n \"--train_data_path\",\n help = \"GCS location pattern of train files containing eval URLs\",\n required = True\n )\n parser.add_argument(\n \"--job-dir\",\n help = \"this model ignores this field, but it is required by gcloud\",\n default = \"junk\"\n )\n\n args, _ = parser.parse_known_args()\n hparams = args.__dict__\n hparams[\"output_dir\"] = os.path.join(\n hparams[\"output_dir\"],\n json.loads(\n os.environ.get(\"TF_CONFIG\", \"{}\")\n ).get(\"task\", {}).get(\"trial\", \"\")\n )\n print(\"output_dir\", hparams[\"output_dir\"])\n model.train_and_evaluate(hparams)\n",
"_____no_output_____"
]
],
[
[
"### Create config.yaml file\n\nSpecify the hyperparameter tuning configuration for your training job\nCreate a HyperparameterSpec object to hold the hyperparameter tuning configuration for your training job, and add the HyperparameterSpec as the hyperparameters object in your TrainingInput object.\n\nIn your HyperparameterSpec, set the hyperparameterMetricTag to a value representing your chosen metric. If you don't specify a hyperparameterMetricTag, AI Platform Training looks for a metric with the name training/hptuning/metric. The following example shows how to create a configuration for a metric named metric1:",
"_____no_output_____"
],
[
"## Exercise.\n\nComplete the TODOs below. \n\n - Specify the hypertuning cofiguration for the learning rate, the batch size and the number of buckets using one of the available [hyperparameter types](https://cloud.google.com/ai-platform/training/docs/hyperparameter-tuning-overview#hyperparameter_types). \n - Specify the hyperparameter tuning metric tag\n - Set the maximum number of parallel trial and the max number of trials",
"_____no_output_____"
]
],
[
[
"%%writefile hptuning_config.yaml\ntrainingInput:\n scaleTier: BASIC\n hyperparameters:\n goal: MINIMIZE\n maxTrials: # TODO: Your code goes here\n maxParallelTrials: # TODO: Your code goes here\n hyperparameterMetricTag: # TODO: Your code goes here\n enableTrialEarlyStopping: True\n params:\n - parameterName: lr\n # TODO: Your code goes here\n - parameterName: nbuckets\n # TODO: Your code goes here\n - parameterName: batch_size\n # TODO: Your code goes here\n ",
"_____no_output_____"
]
],
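[
[
"*Added note (hedged, one possible completion, not the official lab solution):* the `hptuning_config.yaml` TODOs can be filled with standard `HyperparameterSpec` fields; the ranges and trial counts below are illustrative assumptions only, and the metric tag matches the `rmse` value reported from `model.py`.\n\n```yaml\n    maxTrials: 10\n    maxParallelTrials: 2\n    hyperparameterMetricTag: rmse\n    params:\n    - parameterName: lr\n      type: DOUBLE\n      minValue: 0.0001\n      maxValue: 0.1\n      scaleType: UNIT_LOG_SCALE\n    - parameterName: nbuckets\n      type: INTEGER\n      minValue: 10\n      maxValue: 25\n      scaleType: UNIT_LINEAR_SCALE\n    - parameterName: batch_size\n      type: DISCRETE\n      discreteValues:\n      - 15\n      - 30\n      - 50\n```",
"_____no_output_____"
]
],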
[
[
"#### Report your hyperparameter metric to AI Platform Training\n\nThe way to report your hyperparameter metric to the AI Platform Training service depends on whether you are using TensorFlow for training or not. It also depends on whether you are using a runtime version or a custom container for training.\n\nWe recommend that your training code reports your hyperparameter metric to AI Platform Training frequently in order to take advantage of early stopping.\n\nTensorFlow with a runtime version\nIf you use an AI Platform Training runtime version and train with TensorFlow, then you can report your hyperparameter metric to AI Platform Training by writing the metric to a TensorFlow summary. Use one of the following functions.\n",
"_____no_output_____"
]
],
[
[
"%%bash\n\n# Output directory and jobID\nOUTDIR=gs://${BUCKET}/taxifare/trained_model_$(date -u +%y%m%d_%H%M%S)\nJOBID=taxifare_$(date -u +%y%m%d_%H%M%S)\necho ${OUTDIR} ${REGION} ${JOBID}\ngsutil -m rm -rf ${OUTDIR}\n\n# Model and training hyperparameters\nBATCH_SIZE=15\nNUM_EXAMPLES_TO_TRAIN_ON=100\nNUM_EVALS=10\nNBUCKETS=10\nLR=0.001\nNNSIZE=\"32 8\"\n\n# GCS paths\nGCS_PROJECT_PATH=gs://$BUCKET/taxifare\nDATA_PATH=$GCS_PROJECT_PATH/data\nTRAIN_DATA_PATH=$DATA_PATH/taxi-train*\nEVAL_DATA_PATH=$DATA_PATH/taxi-valid*\n\n# TODO\ngcloud ai-platform jobs submit training $JOBID \\\n # TODO: Your code goes here\n -- \\\n --eval_data_path $EVAL_DATA_PATH \\\n --output_dir $OUTDIR \\\n --train_data_path $TRAIN_DATA_PATH \\\n --batch_size $BATCH_SIZE \\\n --num_examples_to_train_on $NUM_EXAMPLES_TO_TRAIN_ON \\\n --num_evals $NUM_EVALS \\\n --nbuckets $NBUCKETS \\\n --lr $LR \\\n --nnsize $NNSIZE ",
"_____no_output_____"
]
],
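[
[
"*Added note (hedged, one possible completion, not the official lab solution):* the `# TODO` line in the `gcloud` command above is where the job configuration flags usually go; the `-- \\` separator and the user arguments after it are already in the cell. One plausible set of flags, assuming the `taxifare/trainer` package layout used in this lab:\n\n```bash\n    --module-name=trainer.task \\\n    --package-path=taxifare/trainer \\\n    --staging-bucket=gs://$BUCKET \\\n    --config=hptuning_config.yaml \\\n    --python-version=3.7 \\\n    --runtime-version=$TFVERSION \\\n    --region=$REGION \\\n```",
"_____no_output_____"
]
],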
[
[
"Copyright 2020 Google Inc. Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
]
] |
4a6317494b980cefb73f11e4213f3f59106f7586
| 187,211 |
ipynb
|
Jupyter Notebook
|
code/Tratamento.ipynb
|
errearanhas/thesis
|
d5ad37f155d2695fa5d549f6f58796875c903b02
|
[
"MIT"
] | null | null | null |
code/Tratamento.ipynb
|
errearanhas/thesis
|
d5ad37f155d2695fa5d549f6f58796875c903b02
|
[
"MIT"
] | null | null | null |
code/Tratamento.ipynb
|
errearanhas/thesis
|
d5ad37f155d2695fa5d549f6f58796875c903b02
|
[
"MIT"
] | null | null | null | 86.631652 | 21,128 | 0.758983 |
[
[
[
"import pandas as pd\nimport os\nimport geopy as geo\nimport numpy as np\nfrom folium.plugins import FastMarkerCluster\nimport folium\nfrom geopy.geocoders import Nominatim\nimport matplotlib.pyplot as plt\n\n%matplotlib inline",
"_____no_output_____"
]
],
[
[
"## Importando a base e gerando um sample",
"_____no_output_____"
]
],
[
[
"os.getcwd()",
"_____no_output_____"
],
[
"df = pd.read_csv('/home/rsa/Documentos/database/2017-01-01.csv')\n\ndf = df.rename(columns={df.columns[0]: \"DATAHORA\", df.columns[1]:\"ORDEM\", df.columns[2]: \"LINHA\",df.columns[3]:\"LATITUDE\",df.columns[4]:\"LONGITUDE\",df.columns[5]: \"VELOCIDADE\"})\ndf = df.copy().drop_duplicates()\ndf['lat_long'] = list(zip(round(df.LATITUDE,5), round(df.LONGITUDE,5)))\n\nprint(df.shape)",
"_____no_output_____"
],
[
"df.head()",
"_____no_output_____"
],
[
"print('Linha:', df.LINHA.nunique())\nprint('Ordem:', df.ORDEM.nunique())\nprint('LATITUDE:', df.LATITUDE.nunique())\nprint('LONGITUDE:', df.LONGITUDE.nunique())\nprint('DATAHORA:', df.DATAHORA.nunique())",
"Linha: 397\nOrdem: 6494\nLATITUDE: 90600\nLONGITUDE: 112345\nDATAHORA: 82591\n"
],
[
"import seaborn as sns\n\n# sns.boxplot(x=df311g.lng)\n\nsns.boxplot(x=df.LATITUDE)",
"_____no_output_____"
],
[
"print(df.describe(include='all'))",
" DATAHORA ORDEM LINHA LATITUDE LONGITUDE \\\ncount 2881294 2881294 1867018 2.881294e+06 2.881294e+06 \nunique 82591 6494 397 NaN NaN \ntop 01-01-2017 08:24:30 C47491 878.0 NaN NaN \nfreq 100 2502 53428 NaN NaN \nmean NaN NaN NaN -2.290339e+01 -4.335681e+01 \nstd NaN NaN NaN 5.630601e-02 1.509855e-01 \nmin NaN NaN NaN -2.597741e+01 -5.432600e+01 \n25% NaN NaN NaN -2.295053e+01 -4.338890e+01 \n50% NaN NaN NaN -2.290070e+01 -4.334839e+01 \n75% NaN NaN NaN -2.287599e+01 -4.328511e+01 \nmax NaN NaN NaN 0.000000e+00 1.043624e+02 \n\n VELOCIDADE lat_long \ncount 2.881294e+06 2881294 \nunique NaN 1296150 \ntop NaN (-22.95306, -43.35091) \nfreq NaN 2169 \nmean 1.101972e+01 NaN \nstd 1.878057e+01 NaN \nmin 0.000000e+00 NaN \n25% 0.000000e+00 NaN \n50% 0.000000e+00 NaN \n75% 1.900000e+01 NaN \nmax 6.820900e+02 NaN \n"
],
[
"import datetime\n\n#df1 = df[df.LINHA.isin(['476.0'])].copy()\n#df1 = df.sample(1000000, random_state=1)\ndf1 = df.copy()\ndf1 = df1.rename(columns={'LATITUDE':'lat', 'LONGITUDE':'lng'})\n#df1[\"ID\"] = df1[\"ORDEM\"].map(str) +\"-\"+ df1[\"LINHA\"].map(str)\n\ndf1['DATAHORA'] = df1.apply(lambda x: datetime.datetime.strptime(x.DATAHORA,\"%m-%d-%Y %H:%M:%S\"),1)\n\ndf1 = df1[['DATAHORA', 'ORDEM', 'LINHA', 'lat_long', 'lat', 'lng']]\ndf1 = df1.drop_duplicates()\ndf1 = df1.sort_values(['LINHA', 'ORDEM', 'DATAHORA']).reset_index()\ndf1 = df1.drop(columns=['index'])\n\nprint(df1.shape)\ndf1.info()",
"(2881294, 6)\n<class 'pandas.core.frame.DataFrame'>\nRangeIndex: 2881294 entries, 0 to 2881293\nData columns (total 6 columns):\nDATAHORA datetime64[ns]\nORDEM object\nLINHA object\nlat_long object\nlat float64\nlng float64\ndtypes: datetime64[ns](1), float64(2), object(3)\nmemory usage: 131.9+ MB\n"
],
[
"df1.head()",
"_____no_output_____"
],
[
"df1.groupby(df1.DATAHORA.dt.date).count()",
"_____no_output_____"
],
[
"import seaborn as sns\n\nprint(df1['lat'].describe())\nplt.figure(figsize=(9, 8))\nsns.distplot(df1['lat'], color='g', bins=100, hist_kws={'alpha': 0.4});\n#sns.distplot(df1['lng'], color='g', bins=100, hist_kws={'alpha': 0.4});",
"count 2.881294e+06\nmean -2.290339e+01\nstd 5.630601e-02\nmin -2.597741e+01\n25% -2.295053e+01\n50% -2.290070e+01\n75% -2.287599e+01\nmax 0.000000e+00\nName: lat, dtype: float64\n"
],
[
"### Visualizando os dados no mapa (Folium)\n\nimport folium\nfrom folium.plugins import MousePosition, MiniMap\nfrom folium.plugins import Draw\n\nlocs = [list(i) for i in df1.drop_duplicates(['lat_long'])['lat_long']]\n\nbrasil = folium.Map(\n #width=500,height=500,\n #tiles='OpenStreetMap',\n #tiles='cartodbpositron',\n tiles='CartoDB dark_matter',\n location=locs[0], \n zoom_start=11\n)\n\nfor i in locs: \n folium.CircleMarker(\n location=i,\n color=['blue'],\n radius=1,\n weight=3\n ).add_to(brasil)\n\npath = '/home/rsa/Documentos/thesis/plots_images/'\n\nmouse = MousePosition(position='topright')\ndraw = Draw(export=True)\nminimap = MiniMap(toggle_display=True, tile_layer='CartoDB dark_matter')\n\nmouse.add_to(brasil)\ndraw.add_to(brasil)\nminimap.add_to(brasil)\n\nbrasil.save(path + 'plot_data.html')",
"_____no_output_____"
],
[
"# IMPORTANDO E \"TRADUZINDO\" O POLÍGONO SELECIONADO, PARA EM SEGUIDA SELECIONAR OS PONTOS ENGLOBADOS\n\nimport geojson\nfrom shapely.geometry.polygon import Polygon\n\nwith open('/home/rsa/Downloads/data.geojson') as f:\n gj = geojson.load(f)\n\npolygon = gj['features'][0].geometry.coordinates[0]\npol = Polygon(polygon) # create polygon\nprint(polygon)\npol",
"_____no_output_____"
],
[
"# ESTA CÉLULA DEVE SER RODADA APENAS EM CASO DE SELEÇÃO DE POLÍGONO\n\n# SELECIONANDO OS PONTOS ENGLOBADOS\n\nfrom shapely.geometry import Point\n\ndf1['isin_draw'] = df1.apply(lambda x: pol.contains(Point(x.lng,x.lat)), 1)*1 # create column indicating if point is in Polygon\ndf1 = df1[df1.isin_draw==True].copy()\nprint(df1.shape)\ndf1.head()",
"_____no_output_____"
]
],
[
[
"## Applying Uber H3 (hexagons)",
"_____no_output_____"
]
],
[
[
"print(df1.shape)\ndf1.head()",
"(2881294, 6)\n"
],
[
"import seaborn as sns\n\n# sns.boxplot(x=df311g.lng)\n\nsns.boxplot(x=df1.lat)",
"_____no_output_____"
],
[
"# REMOVING OUTLIER USING Z-SCORE (Opcional: Rodar apenas para visualizar melhor no mapa do matplotlib)\n\ndef remove_outlier(data_1, threshold=3):\n outliers=[]\n\n mean_1 = np.mean(data_1)\n std_1 =np.std(data_1)\n \n for y in data_1:\n z_score= (y - mean_1)/std_1 \n if np.abs(z_score) > threshold:\n outliers.append(y)\n output = [i for i in data_1 if i not in outliers]\n return output\n\nthreshold = 2.5\n\ndf1 = df1[df1.lng.isin(remove_outlier(df1.lng, threshold))]\ndf1 = df1[df1.lat.isin(remove_outlier(df1.lat, threshold))]",
"_____no_output_____"
],
[
"# CREATING HEXAGONS WITH CUSTOMIZED SIZE\n\nfrom h3 import h3\n\nAPERTURE_SIZE = 12\nhex_col = 'hex' + str(APERTURE_SIZE)\n\n# Functions\ndef plot_scatter(df, metric_col, x='lng_hex', y='lat_hex', marker='o', alpha=1, figsize=(17, 15), colormap='viridis'): \n #https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.plot.scatter.html\n df.plot.scatter(x=x, y=y, c=metric_col, edgecolors='none', colormap=colormap, marker=marker, alpha=alpha, figsize=figsize);\n plt.xticks([], []); plt.yticks([], [])\n\n# find hexs containing the points\ndf1[hex_col] = df1.apply(lambda x: h3.geo_to_h3(x.lat_long[0],x.lat_long[1],APERTURE_SIZE), 1)\n\n# aggregate the points\ndf1_ag = df1.groupby(hex_col).size().to_frame('count').reset_index()\n\n# find center of hex for visualization\ndf1_ag['lat_hex'] = df1_ag[hex_col].apply(lambda x: h3.h3_to_geo(x)[0])\ndf1_ag['lng_hex'] = df1_ag[hex_col].apply(lambda x: h3.h3_to_geo(x)[1])\n\n# plot the hexs\nplot_scatter(df1_ag, metric_col='count', figsize=(10,5))\n\n#print(df1.head())\n#print(df1_ag.head())",
"_____no_output_____"
],
[
"# VISUALIZING THE HEXAGONS\n\nfrom h3 import h3\n\ndef visualize_hexagons(hexagons, color=\"red\", folium_map=None, fill= None):\n \"\"\"\n hexagons is a list of hexcluster. Each hexcluster is a list of hexagons. \n eg. [[hex1, hex2], [hex3, hex4]]\n \"\"\"\n polylines = []\n lat = []\n lng = []\n for hex in hexagons:\n polygons = h3.h3_set_to_multi_polygon([hex], geo_json=False)\n # flatten polygons into loops.\n outlines = [loop for polygon in polygons for loop in polygon]\n polyline = [outline + [outline[0]] for outline in outlines][0]\n lat.extend(map(lambda v:v[0],polyline))\n lng.extend(map(lambda v:v[1],polyline))\n polylines.append(polyline)\n \n if folium_map is None:\n m = folium.Map(location=[sum(lat)/len(lat), sum(lng)/len(lng)], zoom_start=13, tiles='cartodbpositron')\n else:\n m = folium_map\n for polyline in polylines:\n my_PolyLine=folium.PolyLine(locations=polyline,weight=8,color=color,fill_color=fill)\n m.add_child(my_PolyLine)\n return m",
"_____no_output_____"
],
[
"hexagons = df1_ag[hex_col].tolist()\nm = visualize_hexagons(hexagons, color='red', folium_map=brasil)\nm.save(path + 'hexagons_plot_data.html')",
"_____no_output_____"
],
[
"# ELIMINANDO LINHAS DE ONIBUS COM APENAS 1 SINAL EMITIDO\n\naux = pd.DataFrame(df1.groupby('LINHA').size()).reset_index()\naux = aux.rename(columns={0:'count'})\ninclude = aux[aux['count']>1]['LINHA']\n\ndf1 = df1[df1.LINHA.isin(include)]\ndf1 = df1.sort_values('DATAHORA')\n\nprint(df1.shape)\ndf1.head()",
"(1867011, 9)\n"
],
[
"# FUNÇÃO PARA CALCULAR AS DISTÂNCIAS ENTRE OS SINAIS DE GPS EMITIDOS (AGRUPADO POR LINHA DE ÔNIBUS)\n\n# VECTORIZES HAVERSINE FUNCTION\n\ndef haversine2(lat1, lon1, lat2, lon2, to_radians=True, earth_radius=6371):\n \"\"\"\n slightly modified version: of http://stackoverflow.com/a/29546836/2901002\n\n Calculate the great circle distance between two points\n on the earth (specified in decimal degrees or in radians)\n\n All (lat, lon) coordinates must have numeric dtypes and be of equal length.\n\n \"\"\"\n if to_radians:\n lat1, lon1, lat2, lon2 = np.radians([lat1, lon1, lat2, lon2])\n\n a = np.sin((lat2-lat1)/2.0)**2 + \\\n np.cos(lat1) * np.cos(lat2) * np.sin((lon2-lon1)/2.0)**2\n\n distance_in_meters = earth_radius * (10**3) * 2 * np.arcsin(np.sqrt(a))\n\n return distance_in_meters\n\n# Ou dá para utilizar a função do pacote do Haversine\n\n#import haversine as hv\n#hv.haversine(df1.lat_long[1989351], df1.lat_long[3667807], unit = 'm')",
"_____no_output_____"
],
[
"import geopy as geo\nfrom geopy import distance\n\nstart = [41.49008, -71.312796]\nend = [41.499498, -81.695391]\n\nprint(geo.distance.geodesic(start, end).m)\n\nprint(haversine2(start[0], start[1], end[0], end[1]))",
"866455.4329098684\n864213.2735075523\n"
],
[
"# CALCULANDO AS DISTANCIAS (DELTA S) PELO GEODESIC DO GEOPY\n\ndf1 = df1.sort_values(['LINHA','DATAHORA'])\n\ngrp = df1.groupby('LINHA')\n\ndf1['shift_lat_long'] = grp.shift()['lat_long']\n\nlista = []\n\nfor i in df1.index:\n if 'n' in str(df1.shift_lat_long[i]).lower():\n lista.append(np.nan)\n else:\n lista.append(geo.distance.geodesic(df1.lat_long[i], df1.shift_lat_long[i]).m)\n \ndf1['delta_S'] = lista",
"_____no_output_____"
],
[
"df1.head()",
"_____no_output_____"
],
[
"# CALCULANDO AS DISTANCIAS (DELTA s) E OS TEMPOS (DELTA T)\n\ndf1 = df1.sort_values(['LINHA','DATAHORA'])\n\ngrp = df1.groupby('LINHA')\n\n# CALCULANDO DISTANCE TRAVELED\ndf1['delta_s'] = np.concatenate(grp.apply(lambda x: haversine2(x['lat'], x['lng'], x['lat'].shift(), x['lng'].shift()\n )\n ).values\n )\n \n# CALCULANDO TIME SPENT\ndf1['delta_t'] = (grp.apply(lambda x: x['DATAHORA'] - x['DATAHORA'].shift()).values)\n\ndf1['delta_t'] = df1.apply(lambda x: x['delta_t'].total_seconds(),1).values\n\n#df1['delta_t'] = df1['delta_t'].fillna(pd.Timedelta(seconds=0))\ndf1['delta_t'] = df1['delta_t'].fillna(0)\ndf1['delta_s'] = df1['delta_s'].fillna(0)\n\ndf1['delta_S'] = df1['delta_S'].fillna(0)\n\n#df1 = df1[df1.delta_s!=0]",
"_____no_output_____"
],
[
"df1.head()",
"_____no_output_____"
],
[
"# CALCULANDO AS ÁREAS DOS HEXÁGONOS E O TAMANHO DAS JANELAS DE TEMPO QUE DEVEM SER CONSIDERADAS\n\nfrom shapely.geometry.polygon import Polygon\n\ndf1['area_hexagon'] = df1.apply(lambda x: Polygon(h3.h3_to_geo_boundary(x[hex_col])).area,1) # o tamanho hex_col já foi definido anteriormente\n\ngrou = df1.groupby([pd.Grouper(freq='10min', key='DATAHORA'), hex_col]) # GROUP BY POR JANELA DE TEMPO e HEXAGONOS\n\ngraf = grou.agg({'delta_S': 'sum', 'delta_t': 'sum', 'area_hexagon':'first', 'LINHA':'nunique'})\n\ngraf = graf[graf.delta_S!=0]\n\ngraf = graf.reset_index()\n\nfrom sklearn import preprocessing\n\n\"\"\"\ngraf = graf[graf.speed != np.inf]\n\ndens = np.array([graf.LINHA/graf.area_hexagon])\nvel = np.array([graf.speed])\n\ngraf['density'] = preprocessing.normalize([dens][0])[0]\ngraf['speed'] = preprocessing.normalize([vel][0])[0]\n\ngraf = graf[['density', 'speed']]\ngraf.head()\n\"\"\"",
"_____no_output_____"
],
[
"graf.head(5)",
"_____no_output_____"
],
[
"graf['Q_i'] = graf.delta_S/(5*graf.area_hexagon*1e8) # DENSITY\ngraf['K_i'] = graf.delta_t/(5*graf.area_hexagon*1e8) # FLOW\n\n# A method to estimate the macroscopic fundamental diagram using limited mobile probe data\n\n#graf['Q_i'] = graf.delta_s/(5*100)\n#graf['K_i'] = graf.delta_t/(5*100)",
"_____no_output_____"
],
[
"graf.head()",
"_____no_output_____"
],
[
"grou2 = graf.groupby([hex_col, 'DATAHORA'])\ngraf2 = grou2.agg({'Q_i': 'sum', 'delta_t': 'sum', 'area_hexagon':'sum'})",
"_____no_output_____"
],
[
"def f_mi(x):\n \"\"\"\n Gera as variáveis de fluxo (Q), densidade (K) e velocidade (V)\"\"\"\n d = []\n d.append( ( (x['area_hexagon'] * x['Q_i']).sum() ) / ( x['area_hexagon'].sum() ) ) \n d.append( ( (x['area_hexagon'] * x['K_i']).sum() ) / ( x['area_hexagon'].sum() ) ) \n d.append( (( (x['area_hexagon'] * x['Q_i']).sum() ) / ( x['area_hexagon'].sum() )) / (( (x['area_hexagon'] * x['K_i']).sum() ) / ( x['area_hexagon'].sum() )) ) \n return pd.Series(d, index=['Q', 'K', 'V'])",
"_____no_output_____"
],
[
"graf2 = graf.groupby(['DATAHORA']).apply(f_mi)\ngraf2.head()",
"_____no_output_____"
],
[
"# CRIANDO COLUNA COM TURNO\n\nconditions = [\n (graf2.index.hour > 7) & (graf2.index.hour < 12),\n (graf2.index.hour > 14) & (graf2.index.hour < 19)]\n\nchoices = ['manha','tarde']\n\ngraf2['turno'] = np.select(conditions, choices, default='outro')\nprint(graf2.shape)\ngraf2.head()",
"(233, 4)\n"
],
[
"gr = graf2.reset_index()\n\ngr['time'] = gr.apply(lambda x: x['DATAHORA'].time(),1)\n\ngr = gr.reset_index(drop=1)",
"_____no_output_____"
],
[
"xlim = [0, 20]\nylim = [0, 700]\n\nfig, ax = plt.subplots()\nbp = graf2.plot(ax=ax, x='K', y='V', style='.', figsize=(20,8), alpha=1, xlim=xlim, ylim=None)\n#bp = gr.plot(ax=ax, x='V', y='K', style='.', figsize=(20,8), alpha=1)\n\n#bp = graf2.groupby('turno').plot(ax=ax, x=\"V\", y=\"K\", style='.', figsize=(20,8), alpha=1)",
"_____no_output_____"
],
[
"# PERÍODO DE 15 MINUTOS, HEX12\n\nxlim = [0, .019e11]\nylim = [0, 1200]\n\nfig, ax = plt.subplots()\n#bp = graf2.plot(ax=ax, x='K', y='Q', style='.', figsize=(20,8), alpha=1, xlim=xlim, ylim=ylim)\nbp = graf2.plot(ax=ax, x='Q', y='K', style='.', figsize=(20,8), alpha=1)\n\n#bp = graf.groupby('turno').plot(ax=ax, x='density', y='VELOCIDADE', style='.', figsize=(20,8), alpha=1)",
"_____no_output_____"
],
[
"# PERÍODO DE 15 MINUTOS, HEX12\n\nxlim = [0, .019e11]\nylim = [0, 1000]\n\nfig, ax = plt.subplots()\nbp = graf2.plot(ax=ax, x='K', y='V', style='.', figsize=(20,8), alpha=1, xlim=xlim, ylim=ylim)\n#bp = graf2.plot(ax=ax, x='K', y='V', style='.', figsize=(20,8), alpha=1)\n\n#bp = graf.groupby('turno').plot(ax=ax, x='density', y='VELOCIDADE', style='.', figsize=(20,8), alpha=1)",
"_____no_output_____"
],
[
"# PERÍODO DE 5 MINUTOS, HEX11\n\nxlim = [0, .019e10]\nylim = [0, 1000]\n\nfig, ax = plt.subplots()\nbp = graf2.plot(ax=ax, x='K', y='V', style='.', figsize=(20,8), alpha=1, xlim=xlim, ylim=ylim)\n#bp = graf2.plot(ax=ax, x='K', y='Q', style='.', figsize=(20,8), alpha=1)\n\n#bp = graf.groupby('turno').plot(ax=ax, x='density', y='VELOCIDADE', style='.', figsize=(20,8), alpha=1)",
"_____no_output_____"
]
]
] |
[
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a632476a82ec306219a68df97bbeb3c686a2118
| 5,199 |
ipynb
|
Jupyter Notebook
|
Sale_Price_Validation.ipynb
|
DanaPing/Portfolio
|
a3d504f2d42bc2f5274a850bfa4346ee0cecb745
|
[
"MIT"
] | null | null | null |
Sale_Price_Validation.ipynb
|
DanaPing/Portfolio
|
a3d504f2d42bc2f5274a850bfa4346ee0cecb745
|
[
"MIT"
] | null | null | null |
Sale_Price_Validation.ipynb
|
DanaPing/Portfolio
|
a3d504f2d42bc2f5274a850bfa4346ee0cecb745
|
[
"MIT"
] | null | null | null | 35.609589 | 236 | 0.4799 |
[
[
[
"<a href=\"https://colab.research.google.com/github/DanaPing/Portfolio/blob/main/Sale_Price_Validation.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>",
"_____no_output_____"
]
],
[
[
"# This program which when given an item's original price and percentage \n# it has been discounted, will compute the total price including goods \n# and services tax of the item on sale.\n\n\ndef input_data():\n ''' This function will prompt the uers for an item name, price and discount rate. \n The values will returned'''\n \n item_name = input(\"Please enter the item name: \")\n while True:\n try:\n original_price = int(input(\"Pease enter the original price: \"))\n discount_rate = int(input(\"Please enter the discount rate: \"))\n break\n except ValueError:\n print(\"Please input a number, do not use words!\")\n \n return item_name, original_price, discount_rate\n\n\ndef calculate_total_price(original_price, discount_rate):\n '''This funciton will calulate the amount saved and sale price given\n the original price and discount rate. A GST of 10% is applied''' \n \n # The goods and services tax is a constant\n GST = 10 \n \n amount_saved = original_price * ( discount_rate / 100)\n sale_price = original_price - amount_saved\n tax = sale_price * GST/100\n total_price = sale_price + tax\n \n return total_price, amount_saved, sale_price, tax\n\n\n\ndef output_results(item_name, original_price, discount_rate, amount_saved,\n sale_price, tax, total_price):\n '''This function will output the results of the discount in a nice format'''\n\n print('-'*30)\n print('------------------------------')\n print(\"Item Name:\" , item_name)\n print(\"Pre-sale price: \" , original_price)\n print(\"Discount Rate: \", discount_rate)\n print(\"Amount Saved: \", amount_saved)\n print(\"Sale Price: \", sale_price)\n print(\"Good and Services Tax: \", tax)\n print(\"Final Price is:\" , total_price)\n print('------------------------------')\n print('-'*30)\n\n\n# Main Program\n\n# Step 1: Input data\nname, price, discount = input_data()\n\n# Step 2: Perform the Calculations\ntotal, saved, sale, tax = calculate_total_price(price, discount)\n\n# Step 3: Output the results\noutput_results(name, price, discount, saved, sale, tax, total)",
"Please enter the item name: item name\nPease enter the original price: sd\nPlease input a number, do not use words!\nPease enter the original price: 2\nPlease enter the discount rate: sadjkf\nPlease input a number, do not use words!\nPease enter the original price: 23\nPlease enter the discount rate: 2\n------------------------------\n------------------------------\nItem Name: item name\nPre-sale price: 23\nDiscount Rate: 2\nAmount Saved: 0.46\nSale Price: 22.54\nGood and Services Tax: 2.2539999999999996\nFinal Price is: 24.793999999999997\n------------------------------\n------------------------------\n"
],
[
"",
"_____no_output_____"
]
]
] |
[
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code",
"code"
]
] |
4a6332595d467daa13811bd741e5cdccf4cafb3c
| 25,199 |
ipynb
|
Jupyter Notebook
|
scripts/d21-en/mxnet/chapter_deep-learning-computation/model-construction.ipynb
|
lucmertins/CapDeepLearningBook
|
e5959b552c8716e7fc65a21ae9c13c58509544c1
|
[
"MIT"
] | null | null | null |
scripts/d21-en/mxnet/chapter_deep-learning-computation/model-construction.ipynb
|
lucmertins/CapDeepLearningBook
|
e5959b552c8716e7fc65a21ae9c13c58509544c1
|
[
"MIT"
] | null | null | null |
scripts/d21-en/mxnet/chapter_deep-learning-computation/model-construction.ipynb
|
lucmertins/CapDeepLearningBook
|
e5959b552c8716e7fc65a21ae9c13c58509544c1
|
[
"MIT"
] | null | null | null | 35.391854 | 302 | 0.609389 |
[
[
[
"# Layers and Blocks\n:label:`sec_model_construction`\n\nWhen we first introduced neural networks,\nwe focused on linear models with a single output.\nHere, the entire model consists of just a single neuron.\nNote that a single neuron\n(i) takes some set of inputs;\n(ii) generates a corresponding scalar output;\nand (iii) has a set of associated parameters that can be updated\nto optimize some objective function of interest.\nThen, once we started thinking about networks with multiple outputs,\nwe leveraged vectorized arithmetic\nto characterize an entire layer of neurons.\nJust like individual neurons,\nlayers (i) take a set of inputs,\n(ii) generate corresponding outputs,\nand (iii) are described by a set of tunable parameters.\nWhen we worked through softmax regression,\na single layer was itself the model.\nHowever, even when we subsequently\nintroduced MLPs,\nwe could still think of the model as\nretaining this same basic structure.\n\nInterestingly, for MLPs,\nboth the entire model and its constituent layers\nshare this structure.\nThe entire model takes in raw inputs (the features),\ngenerates outputs (the predictions),\nand possesses parameters\n(the combined parameters from all constituent layers).\nLikewise, each individual layer ingests inputs\n(supplied by the previous layer)\ngenerates outputs (the inputs to the subsequent layer),\nand possesses a set of tunable parameters that are updated\naccording to the signal that flows backwards\nfrom the subsequent layer.\n\n\nWhile you might think that neurons, layers, and models\ngive us enough abstractions to go about our business,\nit turns out that we often find it convenient\nto speak about components that are\nlarger than an individual layer\nbut smaller than the entire model.\nFor example, the ResNet-152 architecture,\nwhich is wildly popular in computer vision,\npossesses hundreds of layers.\nThese layers consist of repeating patterns of *groups of layers*. Implementing such a network one layer at a time can grow tedious.\nThis concern is not just hypothetical---such\ndesign patterns are common in practice.\nThe ResNet architecture mentioned above\nwon the 2015 ImageNet and COCO computer vision competitions\nfor both recognition and detection :cite:`He.Zhang.Ren.ea.2016`\nand remains a go-to architecture for many vision tasks.\nSimilar architectures in which layers are arranged\nin various repeating patterns\nare now ubiquitous in other domains,\nincluding natural language processing and speech.\n\nTo implement these complex networks,\nwe introduce the concept of a neural network *block*.\nA block could describe a single layer,\na component consisting of multiple layers,\nor the entire model itself!\nOne benefit of working with the block abstraction\nis that they can be combined into larger artifacts,\noften recursively. This is illustrated in :numref:`fig_blocks`. 
By defining code to generate blocks\nof arbitrary complexity on demand,\nwe can write surprisingly compact code\nand still implement complex neural networks.\n\n\n:label:`fig_blocks`\n\n\nFrom a programing standpoint, a block is represented by a *class*.\nAny subclass of it must define a forward propagation function\nthat transforms its input into output\nand must store any necessary parameters.\nNote that some blocks do not require any parameters at all.\nFinally a block must possess a backpropagation function,\nfor purposes of calculating gradients.\nFortunately, due to some behind-the-scenes magic\nsupplied by the auto differentiation\n(introduced in :numref:`sec_autograd`)\nwhen defining our own block,\nwe only need to worry about parameters\nand the forward propagation function.\n\n[**To begin, we revisit the code\nthat we used to implement MLPs**]\n(:numref:`sec_mlp_concise`).\nThe following code generates a network\nwith one fully-connected hidden layer\nwith 256 units and ReLU activation,\nfollowed by a fully-connected output layer\nwith 10 units (no activation function).\n",
"_____no_output_____"
]
],
[
[
"from mxnet import np, npx\nfrom mxnet.gluon import nn\n\nnpx.set_np()\n\nnet = nn.Sequential()\nnet.add(nn.Dense(256, activation='relu'))\nnet.add(nn.Dense(10))\nnet.initialize()\n\nX = np.random.uniform(size=(2, 20))\nnet(X)",
"_____no_output_____"
]
],
[
[
"In this example, we constructed\nour model by instantiating an `nn.Sequential`,\nassigning the returned object to the `net` variable.\nNext, we repeatedly call its `add` function,\nappending layers in the order\nthat they should be executed.\nIn short, `nn.Sequential` defines a special kind of `Block`,\nthe class that presents a block in Gluon.\nIt maintains an ordered list of constituent `Block`s.\nThe `add` function simply facilitates\nthe addition of each successive `Block` to the list.\nNote that each layer is an instance of the `Dense` class\nwhich is itself a subclass of `Block`.\nThe forward propagation (`forward`) function is also remarkably simple:\nit chains each `Block` in the list together,\npassing the output of each as the input to the next.\nNote that until now, we have been invoking our models\nvia the construction `net(X)` to obtain their outputs.\nThis is actually just shorthand for `net.forward(X)`,\na slick Python trick achieved via\nthe `Block` class's `__call__` function.\n",
"_____no_output_____"
],
[
"## [**A Custom Block**]\n\nPerhaps the easiest way to develop intuition\nabout how a block works\nis to implement one ourselves.\nBefore we implement our own custom block,\nwe briefly summarize the basic functionality\nthat each block must provide:\n\n1. Ingest input data as arguments to its forward propagation function.\n1. Generate an output by having the forward propagation function return a value. Note that the output may have a different shape from the input. For example, the first fully-connected layer in our model above ingests an input of arbitrary dimension but returns an output of dimension 256.\n1. Calculate the gradient of its output with respect to its input, which can be accessed via its backpropagation function. Typically this happens automatically.\n1. Store and provide access to those parameters necessary\n to execute the forward propagation computation.\n1. Initialize model parameters as needed.\n\nIn the following snippet,\nwe code up a block from scratch\ncorresponding to an MLP\nwith one hidden layer with 256 hidden units,\nand a 10-dimensional output layer.\nNote that the `MLP` class below inherits the class that represents a block.\nWe will heavily rely on the parent class's functions,\nsupplying only our own constructor (the `__init__` function in Python) and the forward propagation function.\n",
"_____no_output_____"
]
],
[
[
"class MLP(nn.Block):\n # Declare a layer with model parameters. Here, we declare two\n # fully-connected layers\n def __init__(self, **kwargs):\n # Call the constructor of the `MLP` parent class `Block` to perform\n # the necessary initialization. In this way, other function arguments\n # can also be specified during class instantiation, such as the model\n # parameters, `params` (to be described later)\n super().__init__(**kwargs)\n self.hidden = nn.Dense(256, activation='relu') # Hidden layer\n self.out = nn.Dense(10) # Output layer\n\n # Define the forward propagation of the model, that is, how to return the\n # required model output based on the input `X`\n def forward(self, X):\n return self.out(self.hidden(X))",
"_____no_output_____"
]
],
[
[
"Let us first focus on the forward propagation function.\nNote that it takes `X` as the input,\ncalculates the hidden representation\nwith the activation function applied,\nand outputs its logits.\nIn this `MLP` implementation,\nboth layers are instance variables.\nTo see why this is reasonable, imagine\ninstantiating two MLPs, `net1` and `net2`,\nand training them on different data.\nNaturally, we would expect them\nto represent two different learned models.\n\nWe [**instantiate the MLP's layers**]\nin the constructor\n(**and subsequently invoke these layers**)\non each call to the forward propagation function.\nNote a few key details.\nFirst, our customized `__init__` function\ninvokes the parent class's `__init__` function\nvia `super().__init__()`\nsparing us the pain of restating\nboilerplate code applicable to most blocks.\nWe then instantiate our two fully-connected layers,\nassigning them to `self.hidden` and `self.out`.\nNote that unless we implement a new operator,\nwe need not worry about the backpropagation function\nor parameter initialization.\nThe system will generate these functions automatically.\nLet us try this out.\n",
"_____no_output_____"
]
],
[
[
"net = MLP()\nnet.initialize()\nnet(X)",
"_____no_output_____"
]
],
[
[
"A key virtue of the block abstraction is its versatility.\nWe can subclass a block to create layers\n(such as the fully-connected layer class),\nentire models (such as the `MLP` class above),\nor various components of intermediate complexity.\nWe exploit this versatility\nthroughout the following chapters,\nsuch as when addressing\nconvolutional neural networks.\n\n\n## [**The Sequential Block**]\n\nWe can now take a closer look\nat how the `Sequential` class works.\nRecall that `Sequential` was designed\nto daisy-chain other blocks together.\nTo build our own simplified `MySequential`,\nwe just need to define two key function:\n1. A function to append blocks one by one to a list.\n2. A forward propagation function to pass an input through the chain of blocks, in the same order as they were appended.\n\nThe following `MySequential` class delivers the same\nfunctionality of the default `Sequential` class.\n",
"_____no_output_____"
]
],
[
[
"class MySequential(nn.Block):\n def add(self, block):\n # Here, `block` is an instance of a `Block` subclass, and we assume\n # that it has a unique name. We save it in the member variable\n # `_children` of the `Block` class, and its type is OrderedDict. When\n # the `MySequential` instance calls the `initialize` function, the\n # system automatically initializes all members of `_children`\n self._children[block.name] = block\n\n def forward(self, X):\n # OrderedDict guarantees that members will be traversed in the order\n # they were added\n for block in self._children.values():\n X = block(X)\n return X",
"_____no_output_____"
]
],
[
[
"The `add` function adds a single block\nto the ordered dictionary `_children`.\nYou might wonder why every Gluon `Block`\npossesses a `_children` attribute\nand why we used it rather than just\ndefine a Python list ourselves.\nIn short the chief advantage of `_children`\nis that during our block's parameter initialization,\nGluon knows to look inside the `_children`\ndictionary to find sub-blocks whose\nparameters also need to be initialized.\n",
"_____no_output_____"
],
[
"When our `MySequential`'s forward propagation function is invoked,\neach added block is executed\nin the order in which they were added.\nWe can now reimplement an MLP\nusing our `MySequential` class.\n",
"_____no_output_____"
]
],
[
[
"net = MySequential()\nnet.add(nn.Dense(256, activation='relu'))\nnet.add(nn.Dense(10))\nnet.initialize()\nnet(X)",
"_____no_output_____"
]
],
[
[
"Note that this use of `MySequential`\nis identical to the code we previously wrote\nfor the `Sequential` class\n(as described in :numref:`sec_mlp_concise`).\n\n\n## [**Executing Code in the Forward Propagation Function**]\n\nThe `Sequential` class makes model construction easy,\nallowing us to assemble new architectures\nwithout having to define our own class.\nHowever, not all architectures are simple daisy chains.\nWhen greater flexibility is required,\nwe will want to define our own blocks.\nFor example, we might want to execute\nPython's control flow within the forward propagation function.\nMoreover, we might want to perform\narbitrary mathematical operations,\nnot simply relying on predefined neural network layers.\n\nYou might have noticed that until now,\nall of the operations in our networks\nhave acted upon our network's activations\nand its parameters.\nSometimes, however, we might want to\nincorporate terms\nthat are neither the result of previous layers\nnor updatable parameters.\nWe call these *constant parameters*.\nSay for example that we want a layer\nthat calculates the function\n$f(\\mathbf{x},\\mathbf{w}) = c \\cdot \\mathbf{w}^\\top \\mathbf{x}$,\nwhere $\\mathbf{x}$ is the input, $\\mathbf{w}$ is our parameter,\nand $c$ is some specified constant\nthat is not updated during optimization.\nSo we implement a `FixedHiddenMLP` class as follows.\n",
"_____no_output_____"
]
],
[
[
"class FixedHiddenMLP(nn.Block):\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n # Random weight parameters created with the `get_constant` function\n # are not updated during training (i.e., constant parameters)\n self.rand_weight = self.params.get_constant(\n 'rand_weight', np.random.uniform(size=(20, 20)))\n self.dense = nn.Dense(20, activation='relu')\n\n def forward(self, X):\n X = self.dense(X)\n # Use the created constant parameters, as well as the `relu` and `dot`\n # functions\n X = npx.relu(np.dot(X, self.rand_weight.data()) + 1)\n # Reuse the fully-connected layer. This is equivalent to sharing\n # parameters with two fully-connected layers\n X = self.dense(X)\n # Control flow\n while np.abs(X).sum() > 1:\n X /= 2\n return X.sum()",
"_____no_output_____"
]
],
[
[
"In this `FixedHiddenMLP` model,\nwe implement a hidden layer whose weights\n(`self.rand_weight`) are initialized randomly\nat instantiation and are thereafter constant.\nThis weight is not a model parameter\nand thus it is never updated by backpropagation.\nThe network then passes the output of this \"fixed\" layer\nthrough a fully-connected layer.\n\nNote that before returning the output,\nour model did something unusual.\nWe ran a while-loop, testing\non the condition its $L_1$ norm is larger than $1$,\nand dividing our output vector by $2$\nuntil it satisfied the condition.\nFinally, we returned the sum of the entries in `X`.\nTo our knowledge, no standard neural network\nperforms this operation.\nNote that this particular operation may not be useful\nin any real-world task.\nOur point is only to show you how to integrate\narbitrary code into the flow of your\nneural network computations.\n",
"_____no_output_____"
]
],
[
[
"net = FixedHiddenMLP()\nnet.initialize()\nnet(X)",
"_____no_output_____"
]
],
[
[
"We can [**mix and match various\nways of assembling blocks together.**]\nIn the following example, we nest blocks\nin some creative ways.\n",
"_____no_output_____"
]
],
[
[
"class NestMLP(nn.Block):\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n self.net = nn.Sequential()\n self.net.add(nn.Dense(64, activation='relu'),\n nn.Dense(32, activation='relu'))\n self.dense = nn.Dense(16, activation='relu')\n\n def forward(self, X):\n return self.dense(self.net(X))\n\nchimera = nn.Sequential()\nchimera.add(NestMLP(), nn.Dense(20), FixedHiddenMLP())\nchimera.initialize()\nchimera(X)",
"_____no_output_____"
]
],
[
[
"## Efficiency\n",
"_____no_output_____"
],
[
"The avid reader might start to worry\nabout the efficiency of some of these operations.\nAfter all, we have lots of dictionary lookups,\ncode execution, and lots of other Pythonic things\ntaking place in what is supposed to be\na high-performance deep learning library.\nThe problems of Python's [global interpreter lock](https://wiki.python.org/moin/GlobalInterpreterLock) are well known. \nIn the context of deep learning,\nwe may worry that our extremely fast GPU(s)\nmight have to wait until a puny CPU\nruns Python code before it gets another job to run.\nThe best way to speed up Python is by avoiding it altogether.\n\nOne way that Gluon does this is by allowing for\n*hybridization*, which will be described later.\nHere, the Python interpreter executes a block\nthe first time it is invoked.\nThe Gluon runtime records what is happening\nand the next time around it short-circuits calls to Python.\nThis can accelerate things considerably in some cases\nbut care needs to be taken when control flow (as above)\nleads down different branches on different passes through the net.\nWe recommend that the interested reader checks out\nthe hybridization section (:numref:`sec_hybridize`)\nto learn about compilation after finishing the current chapter.\n",
"_____no_output_____"
],
[
"## Summary\n\n* Layers are blocks.\n* Many layers can comprise a block.\n* Many blocks can comprise a block.\n* A block can contain code.\n* Blocks take care of lots of housekeeping, including parameter initialization and backpropagation.\n* Sequential concatenations of layers and blocks are handled by the `Sequential` block.\n\n\n## Exercises\n\n1. What kinds of problems will occur if you change `MySequential` to store blocks in a Python list?\n1. Implement a block that takes two blocks as an argument, say `net1` and `net2` and returns the concatenated output of both networks in the forward propagation. This is also called a parallel block.\n1. Assume that you want to concatenate multiple instances of the same network. Implement a factory function that generates multiple instances of the same block and build a larger network from it.\n",
"_____no_output_____"
],
[
"[Discussions](https://discuss.d2l.ai/t/54)\n",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown"
]
] |
4a633bde0359eafd06ef9cb3689e0ab8af4ee91a
| 7,117 |
ipynb
|
Jupyter Notebook
|
analytical_solution.ipynb
|
bdevans/vcn_regularity
|
68d2fd4987183ff6cc3e380f6394175bc6ed1675
|
[
"MIT"
] | null | null | null |
analytical_solution.ipynb
|
bdevans/vcn_regularity
|
68d2fd4987183ff6cc3e380f6394175bc6ed1675
|
[
"MIT"
] | null | null | null |
analytical_solution.ipynb
|
bdevans/vcn_regularity
|
68d2fd4987183ff6cc3e380f6394175bc6ed1675
|
[
"MIT"
] | null | null | null | 35.059113 | 352 | 0.510889 |
[
[
[
"**Note.** *The following notebook contains code in addition to text and figures. By default, the code has been hidden. You can click the icon that looks like an eye in the toolbar above to show the code. To run the code, click the cell menu, then \"run all\".*",
"_____no_output_____"
]
],
[
[
"# Import packages, set preferences, etc.\n%matplotlib inline\nfrom brian2 import *\nimport ipywidgets as ipw\nfrom numpy.random import poisson\nfrom scipy.integrate import quad\nfrom scipy.special import erf\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nprefs.codegen.target = 'cython'\ndefaultclock.dt = 0.05*ms",
"_____no_output_____"
],
[
"%%html\n<!-- hack to improve styling of ipywidgets sliders -->\n<style type=\"text/css\">\n.widget-label {\n min-width: 35ex;\n max-width: 35ex;\n}\n.widget-hslider {\n width: 100%;\n}\n</style>",
"_____no_output_____"
]
],
[
[
"This notebook demonstrates the analytical solution to the diffusion approximation equations in [the basic model](basic_model.ipynb). These equations are from Brunel 2000 \"Dynamics of sparsely connected networks of excitatory and inhibitory spiking neurons\", appendix A, which cites Tuckwell 1988 \"Introduction to Theoretical Neurobiology\".\n\nWithout refractoriness, the mean interspike interval is\n\n$$m=\\tau\\sqrt{\\pi}\\int_{-\\mu/\\sigma}^{(1-\\mu)/\\sigma}e^{x^2}(1+\\mathrm{erf}(x))\\,\\mathrm{d}x$$\n\nso the firing rate is $1/m$. The CV is\n\n$$CV^2 = 2\\pi\\tau^2/m^2\\int_{-\\mu/\\sigma}^{(1-\\mu)/\\sigma}e^{x^2}\\int_{-\\infty}^x e^{y^2}(1+\\mathrm{erf}(y))^2\\,\\mathrm{d}y\\,\\mathrm{d}x$$\n\nWith refractoriness, the mean interspike interval is\n\n$$\\hat{m} = m+t_\\mathrm{ref}$$\n\nand the CV is\n\n$$\\hat{CV}=CV\\;\\hat{m}\\,/\\,m$$\n\nThe accuracy of this analytical formulation is demonstrated in the interactive figure below.",
"_____no_output_____"
]
],
[
[
"def analytical_fr_cv(mu, sigma, tau, refrac):\n ytheta = (1-mu)/sigma\n yr = -mu/sigma\n r0 = 1/(tau*sqrt(pi)*quad(lambda x: exp(x*x)*(1+erf(x)), yr, ytheta)[0])\n c = quad(lambda x: exp(x*x)*quad(lambda y: exp(y*y)*(1+erf(y))**2, -20, x)[0], yr, ytheta)[0]\n cv2 = 2*pi*tau**2*r0**2*c\n cv = sqrt(cv2)\n rate_ref = 1/(1/r0+refrac)\n cv_ref = cv*rate_ref/r0\n return rate_ref, cv_ref\n\ndef reduced_model(mu=1.5, sigma=0.5, tau_ms=10, t_ref_ms=0.1):\n # Set parameters\n repeats = 1000\n duration = 1000*ms\n tau = tau_ms*ms\n t_ref = t_ref_ms*ms\n # Define and run the model\n eqs = '''\n dv/dt = (mu-v)/tau+sigma*xi*tau**-0.5 : 1 (unless refractory)\n '''\n G = NeuronGroup(repeats, eqs, threshold='v>1', reset='v=0',\n refractory=t_ref, method='euler')\n spikemon = SpikeMonitor(G)\n statemon = StateMonitor(G, 'v', record=[0])\n run(duration)\n # Compute ISI histograms\n isi = []\n for train in spikemon.spike_trains().values():\n train.sort()\n isi.append(diff(train))\n isi = hstack(isi)\n cv = std(isi)/mean(isi)\n # Plot results\n figure(figsize=(10, 2.5))\n subplot(131)\n plot(spikemon.t/ms, spikemon.i, ',k')\n xlabel('Time (ms)')\n ylabel('Repeat number')\n title('Spike raster plot')\n xlim(0, duration/ms)\n ylim(0, repeats)\n \n subplot(132)\n plot(statemon.t[:1000]/ms, statemon.v.T[:1000], '-k')\n xlabel('Time (ms)')\n ylabel('v')\n title('Membrane potential trace')\n #xlim(0, duration/ms)\n ylim(-0.2, 1.2)\n axhline(0, ls=':', c='r')\n axhline(1, ls=':', c='g')\n \n subplot(133)\n hist(isi/ms, fc='k', bins=arange(60)*0.5)\n yticks([])\n ylabel('Frequency')\n xlabel('ISI (ms)')\n title('Interspike interval histogram')\n #title('CV = %.2f' % cv)\n text(0.95, 0.9, 'CV = %.2f' % cv, ha='right', va='top',\n bbox=dict(facecolor='white'),\n transform=gca().transAxes)\n tight_layout()\n \n sim_fr = spikemon.num_spikes/(duration*repeats)\n sim_cv = cv\n an_fr, an_cv = analytical_fr_cv(mu, sigma, tau, t_ref)\n print 'Firing rate: simulated=%d sp/s, analytical=%d sp/s' % (sim_fr, an_fr)\n print 'CV: simulated=%.2f, analytical=%.2f' % (sim_cv, an_cv)\n\ndisplay(ipw.interact(reduced_model,\n tau_ms=ipw.FloatSlider(\n min=0.1, max=20.0, step=0.1, value=10.0,\n continuous_update=False,\n description=r\"Membrane time constant $\\tau$ (ms)\"),\n t_ref_ms=ipw.FloatSlider(\n min=0, max=5, step=0.05, value=0.1,\n continuous_update=False,\n description=r\"Refractory period $t_\\mathrm{ref}$ (ms)\"),\n mu=ipw.FloatSlider(\n min=0, max=5, step=0.05, value=1.5,\n continuous_update=False,\n description=r\"Mean current $\\mu$\"),\n sigma=ipw.FloatSlider(\n min=0, max=5, step=0.05, value=0.5,\n continuous_update=False,\n description=r\"Standard deviation of current $\\sigma$\"),\n ));",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a63507ec691bb94afb8cef142bc274dc122ca59
| 60,413 |
ipynb
|
Jupyter Notebook
|
project-2.ipynb
|
Chenyitong33/Statistical-machine-learning
|
4e3a9afe49b54c26033ad750795353aa61bac503
|
[
"FTL"
] | null | null | null |
project-2.ipynb
|
Chenyitong33/Statistical-machine-learning
|
4e3a9afe49b54c26033ad750795353aa61bac503
|
[
"FTL"
] | null | null | null |
project-2.ipynb
|
Chenyitong33/Statistical-machine-learning
|
4e3a9afe49b54c26033ad750795353aa61bac503
|
[
"FTL"
] | null | null | null | 40.930217 | 783 | 0.594326 |
[
[
[
"# Project 2: Digit Recognition\n\n## Statistical Machine Learning (COMP90051), Semester 2, 2017\n\n*Copyright the University of Melbourne, 2017*",
"_____no_output_____"
],
[
"### Submitted by: Yitong Chen\n### Student number: 879326\n### Kaggle-in-class username: *YitongChen*",
"_____no_output_____"
],
[
"In this project, you will be applying machine learning for recognising digits from real world images. The project worksheet is a combination of text, pre-implemented code and placeholders where we expect you to add your code and answers. You code should produce desired result within a reasonable amount of time. Please follow the instructions carefully, **write your code and give answers only where specifically asked**. In addition to worksheet completion, you are also expected to participate **live competition with other students in the class**. The competition will be run using an on-line platform called Kaggle.",
"_____no_output_____"
],
[
"** Marking:** You can get up to 33 marks for Project 2. The sum of marks for Project 1 and Project 2 is then capped to 50 marks\n\n**Due date:** Wednesday 11/Oct/17, 11:59pm AEST (LMS components); and Kaggle competition closes Monday 09/Oct/17, 11:59pm AEST.\n\n**Late submissions** will incur a 10% penalty per calendar day\n\n** Submission materials**\n - **Worksheet**: Fill in your code and answers within this IPython Notebook worksheet.\n - **Competition**: Follow the instructions provided in the corresponding section of this worksheet. Your competition submissions should be made via Kaggle website.\n - **Report**: The report about your competition entry should be submitted to the LMS as a PDF file (see format requirements in `2.2`).\n - **Code**: The source code behind your competition entry.\nThe **Worksheet**, **Report** and **Code** should be bundled into a `.zip` file (not 7z, rar, tar, etc) and submitted in the LMS. Marks will be deducted for submitting files in other formats, or we may elect not to mark them at all.\n\n**Academic Misconduct:** Your submission should contain only your own work and ideas. Where asked to write code, you cannot re-use someone else's code, and should write your own implementation. We will be checking submissions for originality and will invoke the University’s <a href=\"http://academichonesty.unimelb.edu.au/policy.html\">Academic Misconduct policy</a> where inappropriate levels of collusion or plagiarism are deemed to have taken place.",
"_____no_output_____"
],
[
"**Table of Contents**\n\n1. Handwritten Digit Recognition **(16 marks)**\n 1. Linear Approach\n 2. Basis Expansion\n 3. Kernel Perceptron\n 4. Dimensionality Reduction\n \n2. Kaggle Competition **(17 marks)**\n 1. Making Submissions\n 2. Method Description",
"_____no_output_____"
],
[
"## 1. Handwritten Digit Recognition\nHandwritten digit recognition can be framed as a classification task: given a bitmap image as input, predict the digit type (0, 1, ..., 9). The pixel values in each position of the image form our features, and the digit type is the class. We are going to use a dataset where the digits are represented as *28 x 28* bitmap images. Each pixel value ranges between 0 and 1, and represents the monochrome ink intensity at that position. Each image matrix has been flattened into one long feature vector, by concatenating each row of pixels.\n\nIn this part of the project, we will only use images of two digits, namely \"7\" and \"9\". As such, we will be working on a binary classification problem. *Throughout this first section, our solution is going to be based on the perceptron classifier.*\n\nStart by setting up working environment, and loading the dataset. *Do not override variable `digits`, as this will be used throughout this section.*",
"_____no_output_____"
]
],
[
[
"%pylab inline\n\ndigits = np.loadtxt('digits_7_vs_9.csv', delimiter=' ')",
"_____no_output_____"
]
],
[
[
"Take some time to explore the dataset. Note that each image of \"7\" is labeled as -1, and each image of \"9\" is labeled as +1.",
"_____no_output_____"
]
],
[
[
"# extract a stack of 28x28 bitmaps\nX = digits[:, 0:784]\n# extract labels for each bitmap\ny = digits[:, 784:785]\n# display a single bitmap and print its label\nbitmap_index = 0\nplt.imshow(X[bitmap_index,:].reshape(28, 28), interpolation=None)\nprint(y[bitmap_index])",
"_____no_output_____"
]
],
[
[
"You can also display several bitmaps at once using the following code.",
"_____no_output_____"
]
],
[
[
"def gallery(array, ncols):\n nindex, height, width = array.shape\n nrows = nindex//ncols\n result = (array.reshape((nrows, ncols, height, width))\n .swapaxes(1,2)\n .reshape((height*nrows, width*ncols)))\n return result\n\nncols = 10\nresult = gallery(X.reshape((300, 28, 28))[:ncols**2], ncols)\nplt.figure(figsize=(10,10))\nplt.imshow(result, interpolation=None)",
"_____no_output_____"
]
],
[
[
"### 1.1 Linear Approach\nWe are going to use perceptron for our binary classification task. Recall that perceptron is a linear method. Also, for this first step, we will not apply non-linear transformations to the data.\n\nImplement and fit a perceptron to the data above. You may use the implementation from *sklearn*, or implementation from one of our workshops. Report the error of the fit as the proportion of misclassified examples.\n\n<br />\n\n<font color='red'>**Write your code in the cell below ...**</font>",
"_____no_output_____"
]
],
[
[
"## your code here\n# To do this, simply concatenate a column of 1s to the data matrix.\nPhi = np.column_stack([np.ones(X.shape[0]), X])\n# Prediction function\ndef perc_pred(phi, w):\n return np.sign(np.sign(np.dot(phi, w)) + 0.5)\n\n# Training algorithm\ndef train(data, target, epochs, w , eta= 1.):\n for e in range(epochs):\n for i in range(data.shape[0]):\n yhat = perc_pred(data[i,:], w)\n if yhat != target[i]:\n w += eta * target[i] * data[i]\n return w\n\n# Run the training algorithm for 100 epochs to learn the weights\nw = np.zeros(Phi.shape[1])\nw = train(Phi, y, 100, w)\nprint(w.shape)\n\nError = np.sum(perc_pred(Phi, w).reshape(y.shape[0],1) != y) / float(y.shape[0])\nprint(Error)",
"_____no_output_____"
]
],
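[
[
"# Illustrative alternative (not part of the hand-rolled solution above): the worksheet\n# allows using the sklearn implementation, so this is a minimal sketch using sklearn's\n# Perceptron for comparison. It assumes X and y as loaded earlier in this notebook.\nfrom sklearn.linear_model import Perceptron\n\nclf = Perceptron(max_iter=100)\nclf.fit(X, y.ravel())\n\n# proportion of misclassified training examples\nsk_error = np.mean(clf.predict(X) != y.ravel())\nprint('sklearn Perceptron training error:', sk_error)",
"_____no_output_____"
]
],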
[
[
"One of the advantages of a linear approach is the ability to interpret results. To this end, plot the parameters learned above. Exclude the bias term if you were using it, set $w$ to be the learned perceptron weights, and run the following command.",
"_____no_output_____"
]
],
[
[
"#print(w.reshape(1,785))\nw = np.delete(w.reshape(1,785),0,1)\nplt.imshow(w.reshape(28,28), interpolation=None)",
"_____no_output_____"
]
],
[
[
"In a few sentences, describe what you see, referencing which features are most important for making classification. Report any evidence of overfitting.",
"_____no_output_____"
],
[
"<font color='red'>**Write your answer here ...**</font> (as a *markdown* cell)",
"_____no_output_____"
],
[
"The importance of a feature is captured by computing how much the learned model depends on, as the epoch here.\nThe result is blurry, and the error proportion of misclassified examples is always 0.",
"_____no_output_____"
],
[
"Split the data into training and heldout validation partitions by holding out a random 25% sample of the data. Evaluate the error over the course of a training run, and plot the training and validation error rates as a function of the number of passes over the training dataset.\n\n<br />\n<font color='red'>**Write your code in the cell below ...**</font>",
"_____no_output_____"
]
],
[
[
"## your code here\nfrom sklearn.model_selection import train_test_split\n\nphi_train, phi_test, y_train, y_test = train_test_split(Phi, y, \n test_size=0.25, \n random_state=0)\n\nw_hat = np.zeros(Phi.shape[1])\nT = 60\ntrain_error = np.zeros(T)\nheldout_error = np.zeros(T)\nfor ep in range(T):\n # here we use a learning rate, which decays with each epoch\n lr = 1./(1+ep)\n w_hat = train(Phi, y, 1, w_hat, eta = lr )\n #print(w_hat)\n train_error[ep] = np.sum(perc_pred(phi_train, w_hat).reshape(y_train.shape[0],1) != y_train) / np.float(y_train.shape[0])\n heldout_error[ep] = np.sum(perc_pred(phi_test, w_hat).reshape(y_test.shape[0],1) != y_test) / np.float(y_test.shape[0])\n\nplot(train_error, label = 'Train Error')\nplot(heldout_error, label = 'Held-out Error')\nplt.legend()\nxlabel('Epochs')\nylabel('Error')",
"_____no_output_____"
]
],
[
[
"In a few sentences, describe the shape of the curves, and compare the two. Now consider if we were to stop training early, can you choose a point such that you get the best classification performance? Justify your choice.",
"_____no_output_____"
],
[
"<font color='red'>**Write your answer here ...**</font> (as a *markdown* cell)",
"_____no_output_____"
],
[
"The shape was at a very high point at the beginning, and then went down sharply, but it increased a little at around 7 epoches, finally decreased to an even low level at around 9, so it is not good to stop training early. We may choose the point as 10, which is a reasonable low point for both train and holdout error rate.",
"_____no_output_____"
],
[
"Now that we have tried a simple approach, we are going to implement several non-linear approaches to our task. Note that we are still going to use a linear method (the perceptron), but combine this with a non-linear data transformation. We start with basis expansion.",
"_____no_output_____"
],
[
"### 1.2 Basis Expansion\nApply Radial Basis Function (RBF)-based transformation to the data, and fit a perceptron model. Recall that the RBF basis is defined as\n\n$$\\varphi_l(\\mathbf{x}) = \\exp\\left(-\\frac{||\\mathbf{x} - \\mathbf{z}_l||^2}{\\sigma^2}\\right)$$\n\nwhere $\\mathbf{z}_l$ is centre of the $l^{th}$ RBF. We'll use $L$ RBFs, such that $\\varphi(\\mathbf{x})$ is a vector with $L$ elements. The spread parameter $\\sigma$ will be the same for each RBF.\n\n*Hint: You will need to choose the values for $\\mathbf{z}_l$ and $\\sigma$. If the input data were 1D, the centres $\\mathbf{z}_l$ could be uniformly spaced on a line. However, here we have 784-dimensional input. For this reason you might want to use some of the training points as centres, e.g., $L$ randomly chosen \"2\"s and \"7\"s.*\n\n<br />\n\n<font color='red'>**Write your code in the cell below ...**</font>",
"_____no_output_____"
]
],
[
[
"## your code here\n#Each RBF basis function should take x (a 784 dimensional vector), and return a scalar. \n#Each RBF will be parameterised by a centre (784 dimensional vector) and a length scale (scalar). \n#The return scalar is computed based on the distance between x and the centre, as shown in the mathematical formulation. \n#Consequently phi(x) should return a vector of size L, and accordingly Phi(X) for the whole dataset will be a matrix N x L. \n\n# Input:\n# x - is a column vector of input values\n# z - is a scalar that controls location\n# s - is a scalar that controls spread\n#\n# Output:\n# v - contains the values of RBF evaluated for each element x\n# v has the same dimensionality as x\ndef radial_basis_function(x, z, s):\n # ensure that t is a column vector\n '''\n x = np.array(x)\n if x.size == 1:\n x.shape = (1,1)\n else:\n x_length = x.shape[0]\n x.shape = (x_length, 1)\n '''\n # compute RBF value\n r = np.linalg.norm(x - z)\n v = np.exp(-r**2/(s**2))\n return v\n\n# Input:\n# x - is an Nx784 column vector\n# z - is an Lx784 column vector with locations for each of M RBFs\n# s - is a scalar that controls spread, shared between all RBFs\n#\n# Output:\n# Phi - is an NxL matrix, such that Phi(i,j) is the \n# RBF transformation of x(i) using location z(j) and scale s\ndef expand_to_RBF(x, z, s):\n #... your code here ...\n #... in your code use \"radial_basis_function\" from above ...\n L = z.shape[0]\n N = x.shape[0]\n Phi = np.zeros((N, L))\n for i in range(N):\n for j in range(L):\n \n #y_rbf = radial_basis_function(x_rbf, z[i], sigma)\n v = radial_basis_function(x[i], z[j], sigma)\n Phi[i,j] = v\n \n \n return Phi\n\n# set L to 60 and sigma to 0.01\nl = 60\nz = X[np.random.choice(X.shape[0], l, replace=False), :]\nsigma = 0.01 # same scale for each RBF\n\n# use \"expand_to_RBF\" function from above\nx = expand_to_RBF(X, z, sigma)\nprint(x.shape)\nx_dummy = np.ones(X.shape[0])\nX_expand = np.column_stack((x_dummy, x))\nprint(X_expand.shape)\n\n# Run the training algorithm for 100 epochs to learn the weights\nw = np.zeros(X_expand.shape[1])\nw = train(X_expand, y, 100, w)\nprint(w.shape)\n\nError = np.sum(perc_pred(X_expand, w).reshape(y.shape[0],1) != y) / float(y.shape[0])\nprint(Error)\n\n",
"_____no_output_____"
]
],
[
[
"Now compute the validation error for your RBF-perceptron and use this to choose good values of $L$ and $\\sigma$. Show a plot of the effect of changing each of these parameters, and justify your parameter choice.\n\n<br />\n\n<font color='red'>**Write your code in the cell below ...**</font>",
"_____no_output_____"
]
],
[
[
"## your code here\n## when sigma is always 0.01\nsigma = 0.01 # same scale for each RBF\ntrain_error = np.zeros(300)\nheldout_error = np.zeros(300)\nfor L in range(300):\n z = X[np.random.choice(X.shape[0], L, replace=False), :]\n x = expand_to_RBF(X, z, sigma)\n #print(x.shape)\n x_dummy = np.ones(X.shape[0])\n X_expand = np.column_stack((x_dummy, x))\n w_hat = np.zeros(X_expand.shape[1])\n w_hat = train(X_expand, y, 100, w_hat)\n \n phi_train, phi_test, y_train, y_test = train_test_split(X_expand, y, \n test_size=0.25, \n random_state=0)\n #print(w_hat)\n train_error[L] = np.sum(perc_pred(phi_train, w_hat).reshape(y_train.shape[0],1) != y_train) / np.float(y_train.shape[0])\n heldout_error[L] = np.sum(perc_pred(phi_test, w_hat).reshape(y_test.shape[0],1) != y_test) / np.float(y_test.shape[0])\nplt.title('Errors for sigma = 0.01')\nplot(train_error, label = 'Train Error')\nplot(heldout_error, label = 'Held-out Error')\nplt.legend()\nxlabel('number of L')\nylabel('Error')",
"_____no_output_____"
],
[
"# when L is always 300\nl = 300\ntrain_error_list = []\nheldout_error_list = []\nsigmas = [1e-10, 1e-6, 1e-4, 1e-2, 1, 100]\nfor n, s in enumerate(sigmas):\n z = X[np.random.choice(X.shape[0], l, replace=False), :]\n x = expand_to_RBF(X, z, s)\n #print(x.shape)\n x_dummy = np.ones(X.shape[0])\n X_expand = np.column_stack((x_dummy, x))\n w_hat = np.zeros(X_expand.shape[1])\n w_hat = train(X_expand, y, 100, w_hat)\n \n phi_train, phi_test, y_train, y_test = train_test_split(X_expand, y, \n test_size=0.25, \n random_state=0)\n #print(w_hat)\n train_error_list.append(np.sum(perc_pred(phi_train, w_hat).reshape(y_train.shape[0],1) != y_train) / np.float(y_train.shape[0]))\n heldout_error_list.append(np.sum(perc_pred(phi_test, w_hat).reshape(y_test.shape[0],1) != y_test) / np.float(y_test.shape[0]))\n \nplt.title('Errors for L = 300')\nplt.plot(sigmas[:6], np.asarray(train_error_list))\nplt.plot(sigmas[:6], np.asarray(heldout_error_list))\nplt.xlim((min(sigmas), max(sigmas)))\nplt.xscale('log')\nplt.ylim(0, 1)\n#plot(heldout_error, label = 'Held-out Error')\nxlabel('sigma')\nylabel('Error')",
"_____no_output_____"
]
],
[
[
"<font color='red'>**Write your justfication here ...**</font> (as a *markdown* cell)",
"_____no_output_____"
],
[
"From the first plot, we can just set sigma to 0.01 at first, and found that the error rate went down when L became close to 300, thus we set the L to 300 when testing for sigma, as in the second plot. \nFrom the second plot, it seems that the sigma has no effect on the performance.\nAs the result, we will just choose L as 300 with sigma 0.01.",
"_____no_output_____"
],
[
"### 1.3 Kernel Perceptron\nNext, instead of directly computing a feature space transformation, we are going to use the kernel trick. Specifically, we are going to use the kernelised version of perceptron in combination with a few different kernels.\n\n*In this section, you cannot use any libraries other than `numpy` and `matplotlib`.*\n\nFirst, implement linear, polynomial and RBF kernels. The linear kernel is simply a dot product of its inputs, i.e., there is no feature space transformation. Polynomial and RBF kernels should be implemented as defined in the lecture slides.\n\n<br />\n\n<font color='red'>**Write your code in the cell below ...**</font>",
"_____no_output_____"
]
],
[
[
"# Input:\n# u,v - column vectors of the same dimensionality\n#\n# Output:\n# v - a scalar\ndef linear_kernel(u, v):\n ## your code here\n z = np.dot(u.T, v)\n return z\n# Input:\n# u,v - column vectors of the same dimensionality\n# c,d - scalar parameters of the kernel as defined in lecture slides\n#\n# Output:\n# v - a scalar\ndef polynomial_kernel(u, v, c=0, d=3):\n ## your code here\n z = (np.dot(u.T, v)+c)**d\n return z \n \n\n# Input:\n# u,v - column vectors of the same dimensionality\n# gamma - scalar parameter of the kernel as defined in lecture slides\n#\n# Output:\n# v - a scalar\ndef rbf_kernel(u, v, gamma=1):\n ## your code here\n r = np.linalg.norm(u - v)\n v = np.exp(-gamma*(r**2))\n return v",
"_____no_output_____"
]
],
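[
[
"# Illustrative sanity check (a minimal sketch, not required by the worksheet): compare the\n# kernel functions defined above against direct computation on small vectors. It assumes\n# linear_kernel, polynomial_kernel and rbf_kernel from the previous cell.\nu = np.array([1.0, 2.0, 3.0])\nv = np.array([0.5, -1.0, 2.0])\n\nprint(linear_kernel(u, v), np.dot(u, v))\nprint(polynomial_kernel(u, v, c=1, d=2), (np.dot(u, v) + 1)**2)\nprint(rbf_kernel(u, v, gamma=0.1), np.exp(-0.1 * np.linalg.norm(u - v)**2))",
"_____no_output_____"
]
],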
[
[
"Kernel perceptron was a \"green slides\" topic, and you will not be asked about this method in the exam. Here, you are only asked to implement a simple prediction function following the provided equation. In kernel perceptron, the prediction for instance $\\mathbf{x}$ is made based on the sign of\n\n$$w_0 + \\sum_{i=1}^{n}\\alpha_i y_i K(\\mathbf{x}_i, \\mathbf{x})$$\n\nHere $w_0$ is the bias term, $n$ is the number of training examples, $\\alpha_i$ are learned weights, $\\mathbf{x}_i$ and $y_i$ is the training dataset,and $K$ is the kernel.\n\n<br />\n\n<font color='red'>**Write your code in the cell below ...**</font>",
"_____no_output_____"
]
],
[
[
"# Input:\n# x_test - (r x m) matrix with instances for which to predict labels\n# X - (n x m) matrix with training instances in rows\n# y - (n x 1) vector with labels\n# alpha - (n x 1) vector with learned weigths\n# bias - scalar bias term\n# kernel - a kernel function that follows the same prototype as each of the three kernels defined above\n#\n# Output:\n# y_pred - (r x 1) vector of predicted labels\n\ndef kernel_ptron_predict(x_test, X, y, alpha, bias, kernel, c=0 ,d=3 ,gamma=1):\n ## your code here\n # test x_test is a matrix or vector\n if x_test.size == 784:\n R = 1\n else:\n R = x_test.shape[0]\n #R = int(x_test.shape[0]/784)\n #print(R)\n x_test = x_test.reshape(R,784)\n N = X.shape[0]\n y_pred = np.zeros((R,1))\n for i in range(R):\n for j in range(N):\n if kernel == linear_kernel:\n y_pred[i] += alpha[j]*y[j]*kernel(x_test[i],X[j])\n elif kernel == polynomial_kernel:\n y_pred[i] += alpha[j]*y[j]*kernel(x_test[i],X[j],c,d)\n else:\n y_pred[i] += alpha[j]*y[j]*kernel(x_test[i],X[j],gamma)\n y_pred[i] += bias\n y_pred[i]= np.sign(y_pred[i])\n return y_pred\n",
"_____no_output_____"
]
],
[
[
"The code for kernel perceptron training is provided below. You can treat this function as a black box, but we encourage you to understand the implementation.",
"_____no_output_____"
]
],
[
[
"# Input:\n# X - (n x m) matrix with training instances in rows\n# y - (n x 1) vector with labels\n# kernel - a kernel function that follows the same prototype as each of the three kernels defined above\n# epochs - scalar, number of epochs\n#\n# Output:\n# alpha - (n x 1) vector with learned weigths\n# bias - scalar bias term\ndef kernel_ptron_train(X, y, kernel, epochs=100):\n n, m = X.shape\n alpha = np.zeros(n)\n bias = 0\n updates = None\n for epoch in range(epochs):\n print('epoch =', epoch, ', updates =', updates)\n updates = 0\n\n schedule = list(range(n))\n np.random.shuffle(schedule)\n for i in schedule:\n y_pred = kernel_ptron_predict(X[i], X, y, alpha, bias, kernel)\n \n if y_pred != y[i]:\n alpha[i] += 1\n bias += y[i]\n updates += 1\n\n if updates == 0:\n break\n \n return alpha, bias",
"_____no_output_____"
]
],
[
[
"Now use the above functions to train the perceptron. Use heldout validation, and compute the validation error for this method using each of the three kernels. Write a paragraph or two with analysis how the accuracy differs between the different kernels and choice of kernel parameters. Discuss the merits of a kernel approach versus direct basis expansion approach as was used in the previous section.\n\n<br />\n\n<font color='red'>**Write your code in the cell below ...**</font>",
"_____no_output_____"
]
],
[
[
"phi_train, phi_test, y_train, y_test = train_test_split(X, y, \n test_size=0.25, \n random_state=0)\n# Linear kernel\nprint('LINEAR')\nalpha, bias = kernel_ptron_train(phi_train, y_train, linear_kernel)\n# exclude bias term\n#bias = 0\nError = np.sum(kernel_ptron_predict(phi_test, phi_train, y_train, alpha, bias, linear_kernel) != y_test) / float(y_test.shape[0])\nprint('Error =',Error)\n#print(kernel_ptron_predict(phi_test, phi_train, y_train, alpha, bias, linear_kernel))\n\n# Polynomial kernel\nprint('POLYNOMIAL')\nalpha, bias = kernel_ptron_train(phi_train, y_train, polynomial_kernel)\n# exclude bias term\n#bias = 0\nd_list = [0, 1, 10, 100]\nfor n, s in enumerate(d_list):\n Error = np.sum(kernel_ptron_predict(phi_test, phi_train, y_train, alpha, bias, polynomial_kernel, d = s) != y_test) / float(y_test.shape[0])\n print('d:',s,'Error =',Error)\n\nc_list = [0, 1e-2, 1, 10, 100]\nfor n, s in enumerate(c_list):\n Error = np.sum(kernel_ptron_predict(phi_test, phi_train, y_train, alpha, bias, polynomial_kernel, c = s) != y_test) / float(y_test.shape[0])\n print('c:',s,'Error =',Error)\n#print(kernel_ptron_predict(phi_test, phi_train, y_train, alpha, bias, polynomial_kernel))\n\n# RBF kernel\nprint('RBF')\nalpha, bias = kernel_ptron_train(phi_train, y_train, rbf_kernel)\n# exclude bias term\n#bias = 0\ng_list = [ 1e-10, 1e-6, 1e-4, 1e-2, 1, 10, 100]\nfor n, s in enumerate(g_list):\n Error = np.sum(kernel_ptron_predict(phi_test, phi_train, y_train, alpha, bias, rbf_kernel, gamma = s) != y_test) / float(y_test.shape[0])\n print('g:',s,'Error =',Error)\n\n#print(kernel_ptron_predict(phi_test, phi_train, y_train, alpha, bias, rbf_kernel))",
"_____no_output_____"
]
],
[
[
"<font color='red'>**Provide your analysis here ...**</font> (as a *markdown* cell)",
"_____no_output_____"
],
[
"It seems that the accuracy of the polynomial and rbf kernel are slightly better than the linear method, but they requir the good choice of parameters.\nAs for the polynomial kernel, it may have the best accuracy when c is 10 as well as d is 0-1.\nAs for the rbf kernel, it could have the best performance when gamma is 1.\nThe basis expansion relies on a large number of L, whereas the kernel approach is much easier to get a high accuracy.",
"_____no_output_____"
],
[
"### 1.4 Dimensionality Reduction\nYet another approach to working with complex data is to use a non-linear dimensionality reduction. To see how this might work, first apply a couple of dimensionality reduction methods and inspect the results.",
"_____no_output_____"
]
],
[
[
"from sklearn import manifold\n# extract a stack of 28x28 bitmaps\n#X = digits[:, 0:784]\n# extract labels for each bitmap\ny = digits[:, 784:785]\n#print(y)\n\nX = digits[:, 0:784]\ny = np.squeeze(digits[:, 784:785])\n\nprint(y)\n# n_components refers to the number of dimensions after mapping\n# n_neighbors is used for graph construction\nX_iso = manifold.Isomap(n_neighbors=30, n_components=2).fit_transform(X)\n\n\n# n_components refers to the number of dimensions after mapping\nembedder = manifold.SpectralEmbedding(n_components=2, random_state=0)\nX_se = embedder.fit_transform(X)\n\nf, (ax1, ax2) = plt.subplots(1, 2)\nax1.plot(X_iso[y==-1,0], X_iso[y==-1,1], \"bo\")\nax1.plot(X_iso[y==1,0], X_iso[y==1,1], \"ro\")\nax1.set_title('Isomap')\nax2.plot(X_se[y==-1,0], X_se[y==-1,1], \"bo\")\nax2.plot(X_se[y==1,0], X_se[y==1,1], \"ro\")\nax2.set_title('spectral')",
"_____no_output_____"
]
],
[
[
"In a few sentences, explain how a dimensionality reduction algorithm can be used for your binary classification task.",
"_____no_output_____"
],
[
"<font color='red'>**Write your answer here ...**</font> (as a *markdown* cell)",
"_____no_output_____"
],
[
"There are two fields in dimension reduction, linear techniques that use a linear mapping to reduce the dimension, and nonlinear technique makes the assumption that the data available is embedded on a manifold (or surface in lower dimensional space). The data is then mapped onto a lower-dimensional manifold for more efficient processing.\n",
"_____no_output_____"
],
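[
"Concretely, the reduced coordinates simply replace the raw pixels as the input to an ordinary classifier. A minimal sketch of that pipeline, assuming `X` and `y` as defined in the cell above (the `n_components=10` value is an arbitrary illustration, not a tuned choice):\n\n```python\nimport numpy as np\nfrom sklearn import manifold\nfrom sklearn.linear_model import Perceptron\nfrom sklearn.model_selection import train_test_split\n\n# fit the embedding on all available data (train and test pooled, as assumed in this section),\n# then split off a test set and train a linear classifier in the reduced space\nX_low = manifold.Isomap(n_neighbors=30, n_components=10).fit_transform(X)\nXtr, Xte, ytr, yte = train_test_split(X_low, y, test_size=0.25, random_state=0)\nclf = Perceptron().fit(Xtr, ytr)\nprint('heldout error:', np.mean(clf.predict(Xte) != yte))\n```",
"_____no_output_____"
],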
[
"Implement such an approach and assess the result. For simplicity, we will assume that both training and test data are available ahead of time, and thus the datasets should be used together for dimensionality reduction, after which you can split off a test set for measuring generalisation error. *Hint: you do not have to reduce number of dimensions to two. You are welcome to use the sklearn library for this question.*\n \n<br />\n\n<font color='red'>**Write your code in the cell below ...**</font>",
"_____no_output_____"
]
],
[
[
"from sklearn.linear_model import perceptron\n\n# n_components refers to the number of dimensions after mapping\n# n_neighbors is used for graph construction\nX_iso = manifold.Isomap(n_neighbors=30, n_components=2).fit_transform(X)\n\n# split off the test data\nphi_train, phi_test, y_train, y_test = train_test_split(X_iso, y, \n test_size=0.25, \n random_state=0)\n\n# Create the model\nclf = perceptron.Perceptron()\nclf.fit(phi_train, y_train)\n\nError = np.sum(clf.predict(phi_test) != y_test) / float(y_test.shape[0])\nprint(Error)\n\n# Create the KMeans model\n#clf = cluster.KMeans(init='k-means++', n_clusters=10, random_state=42)\n\n# Fit the training data to the model\n#clf.fit(X_train)\n\nf, (ax1, ax2) = plt.subplots(1, 2)\nax1.plot(X_iso[np.where(clf.predict(phi_test)==-1),0], X_iso[np.where(clf.predict(phi_test)==-1),1], \"bo\")\nax1.plot(X_iso[np.where(clf.predict(phi_test)==1),0], X_iso[np.where(clf.predict(phi_test)==1),1], \"ro\")\nax1.set_title('Predicted Training Labels for spectral')\nax2.plot(X_iso[np.where(y_test==-1),0], X_iso[np.where(y_test==-1),1], \"bo\")\nax2.plot(X_iso[np.where(y_test==1),0], X_iso[np.where(y_test==1),1], \"ro\")\nax2.set_title('Actual Training Labels for Isomap')\n",
"_____no_output_____"
],
[
"# split off the test data\nphi_train, phi_test, y_train, y_test = train_test_split(X_se, y, \n test_size=0.25, \n random_state=0)\n\n# Create the model\nclf = perceptron.Perceptron()\nclf.fit(phi_train, y_train)\n\nError = np.sum(clf.predict(phi_test) != y_test) / float(y_test.shape[0])\nprint(Error)\n\n# Create the KMeans model\n#clf = cluster.KMeans(init='k-means++', n_clusters=10, random_state=42)\n\n# Fit the training data to the model\n#clf.fit(X_train)\n\nf, (ax1, ax2) = plt.subplots(1, 2)\nax1.plot(X_se[np.where(clf.predict(phi_test)==-1),0], X_se[np.where(clf.predict(phi_test)==-1),1], \"bo\")\nax1.plot(X_se[np.where(clf.predict(phi_test)==1),0], X_se[np.where(clf.predict(phi_test)==1),1], \"ro\")\nax1.set_title('Predicted Training Labels for spectral')\nax2.plot(X_se[np.where(y_test==-1),0], X_se[np.where(y_test==-1),1], \"bo\")\nax2.plot(X_se[np.where(y_test==1),0], X_se[np.where(y_test==1),1], \"ro\")\nax2.set_title('Actual Training Labels for spectral')",
"_____no_output_____"
]
],
[
[
"In a few sentences, comment on the merits of the dimensionality reduction based approach compared to linear classification from Section 1.1 and basis expansion from Section 1.2.",
"_____no_output_____"
],
[
"<font color='red'>**Write your answer here ...**</font> (as a *markdown* cell)",
"_____no_output_____"
],
[
"From the error rate and the plots, we can find the performance of dimensionality reduction approach is pretty good, especially from the plots, which make the results clear and intuitive.\n\nThe merits over the linear classification is the classification becoming obviously faster, as well as getting high accuracy easily, which is because:\n1. It reduces the time and storage space required.\n2. Removal of multi-collinearity improves the performance of the machine learning model.\n\nBesides, it becomes easier to visualize the data when reduced to very low dimensions such as 2D or 3D, which could be more intuitive to look at the performance.",
"_____no_output_____"
],
[
"## 2. Kaggle Competition\nThe final part of the project is a competition, on more challenging digit data sourced from natural scenes. This data is coloured, pixelated or otherwise blurry, and the digits are not perfectly centered. It is often difficult for humans to classify! The dataset is also considerably larger. \n\nPlease sign up to the [COMP90051 Kaggle competition](https://inclass.kaggle.com/c/comp90051-2017) using your `student.unimelb.edu.au` email address. Then download the file `data.npz` from Kaggle. This is a compressed `numpy` data file containing three ndarray objects:\n - `train_X` training set, with 4096 input features (greyscale pixel values);\n - `train_Y` training labels (0-9)\n - `test_X` test set, with 4096 input features, as per above\n \nEach image is 64x64 pixels in size, which has been flattened into a vector of 4096 values. You should load the files using `np.load`, from which you can extract the three elements. You may need to transpose the images for display, as they were flattened in a different order. Each pixel has an intensity value between 0-255. For those using languages other than python, you may need to output these objects in another format, e.g., as a matlab matrix.\n\nYour job is to develop a *multiclass* classifier on this dataset. You can use whatever techniques you like, such as the perceptron code from above, or other methods such as *k*NN, logistic regression, neural networks, etc. You may want to compare several methods, or try an ensemble combination of systems. You are free to use any python libraries for this question. Note that some fancy machine learning algorithms can take several hours or days to train (we impose no time limits), so please start early to allow sufficient time. *Note that you may want to sample smaller training sets, if runtime is an issue, however this will degrade your accuracy. Sub-sampling is a sensible strategy when developing your code.*\n\nYou may also want to do some basic image processing, however, as this is not part of the subject, we would suggest that you focus most of your efforts on the machine learning. For inspiration, please see [Yan Lecun's MNIST page](http://yann.lecun.com/exdb/mnist/), specifically the table of results and the listed papers. Note that your dataset is harder than MNIST, so your mileage may vary.",
"_____no_output_____"
]
],
[
[
"%pylab inline\n# load the files\ntrain_X = np.load('data/train_X.npy')\ntrain_y = np.load('data/train_y.npy')\ntest_X = np.load('data/test_X.npy')\nprint(train_X.shape)\nprint(train_X[0].shape)\n\n#plt.imshow(train_X[0,:].reshape(64, 64), interpolation=None)\n\nplt.subplot(221)\nplt.imshow(train_X[0].reshape(64,64), cmap=plt.get_cmap('gray'))\nplt.subplot(222)\nplt.imshow(train_X[1].reshape(64,64), cmap=plt.get_cmap('gray'))\nplt.subplot(223)\nplt.imshow(train_X[2].reshape(64,64), cmap=plt.get_cmap('gray'))\nplt.subplot(224)\nplt.imshow(train_X[3].reshape(64,64), cmap=plt.get_cmap('gray'))\n",
"_____no_output_____"
],
[
"%pylab inline\n# Import `train_test_split`\nfrom sklearn.model_selection import train_test_split\n# Import GridSearchCV\nfrom sklearn.model_selection import GridSearchCV\n\nfrom keras.models import Sequential\nfrom keras.layers import Dense\nfrom keras.layers import Dropout\nfrom keras.utils import np_utils\n\n# load the files\ntrain_X = np.load('data/train_X.npy')\ntrain_y = np.load('data/train_y.npy')\ntest_X = np.load('data/test_X.npy')\n\n# We can also reduce our memory requirements \n# by forcing the precision of the pixel values to be 32 bit\ntrain_X = train_X.astype('float32')\ntest_X = test_X.astype('float32')\n\n# normalize inputs from 0-255 to 0-1\ntrain_X = train_X / 255\ntest_X = test_X / 255\n\n# one hot encode outputs\n#train_y = np_utils.to_categorical(train_y)\n\n# Split the data into training and test sets \nX_train, X_test, y_train, y_test = train_test_split(train_X, train_y, test_size=0.25, random_state=42)\n\n#----------------------------SVC--------------------------------\n# Import the `svm` model\nfrom sklearn import svm\n\n# Create the SVC model \nsvc_model = svm.SVC(gamma=0.001, C=100., kernel='linear')\n\n# Fit the data to the SVC model\nsvc_model.fit(X_train, y_train)\n\nprint(svc_model.score(X_test, y_test))\n\n#----------------------SVM candidates----------------------------\n# Set the parameter candidates\nparameter_candidates = [\n {'C': [1, 10, 100, 1000], 'kernel': ['linear']},\n {'C': [1, 10, 100, 1000], 'gamma': [0.001, 0.0001], 'kernel': ['rbf']},\n]\n\n# Create a classifier with the parameter candidates\nclf = GridSearchCV(estimator=svm.SVC(), param_grid=parameter_candidates, n_jobs=1)\n\n# Train the classifier on training data\nclf.fit(X_train, y_train)\n\n# Print out the results \nprint('Best score for training data:', clf.best_score_)\nprint('Best `C`:',clf.best_estimator_.C)\nprint('Best kernel:',clf.best_estimator_.kernel)\nprint('Best `gamma`:',clf.best_estimator_.gamma)\n\n# Apply the classifier to the test data, and view the accuracy score\nprint(clf.score(X_test, y_test))\n\n\n",
"_____no_output_____"
],
[
"#--------------------------Baseline Model with Multi-Layer Perceptrons------------------------\n%pylab inline\n# Import `train_test_split`\nfrom sklearn.model_selection import train_test_split\n# Import GridSearchCV\nfrom sklearn.model_selection import GridSearchCV\n\nfrom keras.models import Sequential\nfrom keras.layers import Dense\nfrom keras.layers import Dropout\nfrom keras.utils import np_utils\n\n# load the files\ntrain_X = np.load('data/train_X.npy')\ntrain_y = np.load('data/train_y.npy')\ntest_X = np.load('data/test_X.npy')\nprint(train_X.shape)\n\n# We can also reduce our memory requirements \n# by forcing the precision of the pixel values to be 32 bit\ntrain_X = train_X.astype('float32')\ntest_X = test_X.astype('float32')\n\n# normalize inputs from 0-255 to 0-1\ntrain_X = train_X / 255\ntest_X = test_X / 255\n\n# one hot encode outputs\ntrain_y = np_utils.to_categorical(train_y)\n\n# Split the data into training and test sets \nX_train, X_test, y_train, y_test = train_test_split(train_X, train_y, test_size=0.25, random_state=42)\n\nnum_pixels = X_train.shape[1]\nnum_classes = y_train.shape[1]\n\n# define baseline model\ndef baseline_model():\n # create model\n model = Sequential()\n model.add(Dense(num_pixels, input_dim=num_pixels, kernel_initializer='normal', activation='relu'))\n model.add(Dense(num_classes, kernel_initializer='normal', activation='softmax'))\n # Compile model\n model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])\n return model\n\n# build the model\nmodel = baseline_model()\n# Fit the model\nmodel.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=20, batch_size=200, verbose=2)\n# Final evaluation of the model\nscores = model.evaluate(X_test, y_test, verbose=0)\nprint(\"Baseline Error: %.2f%%\" % (100-scores[1]*100))\n\nprint('end')",
"_____no_output_____"
],
[
"#--------------------------Simple Convolutional Neural Network--------------------------\n%pylab inline\n# Import `train_test_split`\nfrom sklearn.model_selection import train_test_split\n\nfrom keras.models import Sequential\nfrom keras.layers import Dense\nfrom keras.layers import Dropout\nfrom keras.layers import Flatten\nfrom keras.layers.convolutional import Conv2D\nfrom keras.layers.convolutional import MaxPooling2D\nfrom keras.utils import np_utils\nfrom keras import backend as K\nK.set_image_dim_ordering('th')\n\n# fix random seed for reproducibility\nseed = 7\nnp.random.seed(seed)\n\n# load the files\ntrain_X = np.load('data/train_X.npy')\ntrain_y = np.load('data/train_y.npy')\ntest_X = np.load('data/test_X.npy')\n\n# reshape to be [samples][pixels][width][height]\ntrain_X = train_X.reshape(train_X.shape[0], 1, 64, 64).astype('float32')\ntest_X = test_X.reshape(test_X.shape[0], 1, 64, 64).astype('float32')\n\n# normalize inputs from 0-255 to 0-1\ntrain_X = train_X / 255\ntest_X = test_X / 255\n# one hot encode outputs\ntrain_y = np_utils.to_categorical(train_y)\nnum_classes = train_y.shape[1]\n\n# Split the data into training and test sets \nX_train, X_test, y_train, y_test = train_test_split(train_X, train_y, test_size=0.25, random_state=42)\n\ndef baseline_model():\n # create model\n model = Sequential()\n model.add(Conv2D(32, (5, 5), input_shape=(1, 64, 64), activation='relu'))\n model.add(MaxPooling2D(pool_size=(2, 2)))\n model.add(Dropout(0.2))\n model.add(Flatten())\n model.add(Dense(128, activation='relu'))\n model.add(Dense(num_classes, activation='softmax'))\n # Compile model\n model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])\n return model\n\n# build the model\nmodel = baseline_model()\n# Fit the model\nmodel.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=10, batch_size=400, verbose=2)\n# Final evaluation of the model\nscores = model.evaluate(X_test, y_test, verbose=0)\nprint(\"Baseline Error: %.2f%%\" % (100-scores[1]*100))\n\nnp.savetxt(\"simple.csv\",model.predict(test_X).argmax(1),delimiter=\",\")",
"_____no_output_____"
],
[
"#-------------------Larger Convolutional Neural Network-------------------\n%pylab inline\n# Import `train_test_split`\nfrom sklearn.model_selection import train_test_split\n\nfrom keras.models import Sequential\nfrom keras.layers import Dense\nfrom keras.layers import Dropout\nfrom keras.layers import Flatten\nfrom keras.layers.convolutional import Conv2D\nfrom keras.layers.convolutional import MaxPooling2D\nfrom keras.utils import np_utils\nfrom keras import backend as K\nK.set_image_dim_ordering('th')\nfrom keras.callbacks import ModelCheckpoint\n\n# fix random seed for reproducibility\nseed = 7\nnp.random.seed(seed)\n\n# load the files\ntrain_X = np.load('data/train_X.npy')\ntrain_y = np.load('data/train_y.npy')\ntest_X = np.load('data/test_X.npy')\n\n# reshape to be [samples][pixels][width][height]\ntrain_X = train_X.reshape(train_X.shape[0], 1, 64, 64).astype('float32')\ntest_X = test_X.reshape(test_X.shape[0], 1, 64, 64).astype('float32')\n\n# normalize inputs from 0-255 to 0-1\ntrain_X = train_X / 255\ntest_X = test_X / 255\n# one hot encode outputs\ntrain_y = np_utils.to_categorical(train_y)\nnum_classes = train_y.shape[1]\n\n# Split the data into training and test sets \nX_train, X_test, y_train, y_test = train_test_split(train_X, train_y, test_size=0.25, random_state=42)\n\n# define the larger model\ndef larger_model():\n # create model\n model = Sequential()\n model.add(Conv2D(30, (5, 5), input_shape=(1, 64, 64), activation='relu'))\n model.add(MaxPooling2D(pool_size=(2, 2)))\n model.add(Conv2D(15, (3, 3), activation='relu'))\n model.add(MaxPooling2D(pool_size=(2, 2)))\n model.add(Dropout(0.2))\n model.add(Flatten())\n model.add(Dense(128, activation='relu'))\n model.add(Dense(50, activation='relu'))\n model.add(Dense(num_classes, activation='softmax'))\n # Compile model\n model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])\n return model\n\n# build the model\nmodel = larger_model()\n\n# Fit the model\nmodel.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=20, batch_size=200)\n\n# Final evaluation of the model\nscores = model.evaluate(X_test, y_test, verbose=0)\nprint(\"Large CNN Error: %.2f%%\" % (100-scores[1]*100))\n\nnp.savetxt(\"large_2.csv\",model.predict(test_X).argmax(1),delimiter=\",\")\n",
"_____no_output_____"
],
[
"#-------------------------------Visualization of results---------------------------------\nfrom sklearn import manifold\n# Create an isomap and fit the `digits` data to it\nX_test_reshape = X_test.reshape(X_test.shape[0], 4096)\n\nX_iso = manifold.Isomap(n_neighbors=10).fit_transform(X_test_reshape[0:1000])\nprint('prediction begins')\n# Compute cluster centers and predict cluster index for each sample\npredicted = model.predict(X_test[0:1000]).argmax(1)\nprint('plot')\n# Create a plot with subplots in a grid of 1X2\nfig, ax = plt.subplots(1, 2, figsize=(8, 4))\n\n# Adjust the layout\nfig.subplots_adjust(top=0.85)\n\ny_test_color = y_test[0:1000].argmax(1)\n\n# Add scatterplots to the subplots \nax[0].scatter(X_iso[:, 0], X_iso[:, 1], c=predicted)\nax[0].set_title('Predicted labels')\nax[1].scatter(X_iso[:, 0], X_iso[:, 1], c=y_test_color)\nax[1].set_title('Actual Labels')\n\n\n# Add title\nfig.suptitle('Predicted versus actual labels', fontsize=14, fontweight='bold')\n\n# Show the plot\nplt.show()",
"_____no_output_____"
],
[
"#--------------------------Increase batch size-----------------------------------\n# build the model\nmodel = larger_model()\n# Fit the model\nmodel.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=20, batch_size=2000)\n# Final evaluation of the model\nscores = model.evaluate(X_test, y_test, verbose=0)\nprint(\"Large CNN Error: %.2f%%\" % (100-scores[1]*100))\n\nnp.savetxt(\"large_3.csv\",model.predict(test_X).argmax(1),delimiter=\",\")\n",
"_____no_output_____"
],
[
"#----------------------------Increased layer larger CNN-------------------------------\n%pylab inline\n# Import `train_test_split`\nfrom sklearn.model_selection import train_test_split\n\nfrom keras.models import Sequential\nfrom keras.layers import Dense\nfrom keras.layers import Dropout\nfrom keras.layers import Flatten\nfrom keras.layers.convolutional import Conv2D\nfrom keras.layers.convolutional import MaxPooling2D\nfrom keras.utils import np_utils\nfrom keras import backend as K\nK.set_image_dim_ordering('th')\nfrom keras.callbacks import ModelCheckpoint\n\n# fix random seed for reproducibility\nseed = 7\nnp.random.seed(seed)\n\n# load the files\ntrain_X = np.load('data/train_X.npy')\ntrain_y = np.load('data/train_y.npy')\ntest_X = np.load('data/test_X.npy')\n\n# reshape to be [samples][pixels][width][height]\ntrain_X = train_X.reshape(train_X.shape[0], 1, 64, 64).astype('float32')\ntest_X = test_X.reshape(test_X.shape[0], 1, 64, 64).astype('float32')\n\n# normalize inputs from 0-255 to 0-1\ntrain_X = train_X / 255\ntest_X = test_X / 255\n# one hot encode outputs\ntrain_y = np_utils.to_categorical(train_y)\nnum_classes = train_y.shape[1]\n\n# Split the data into training and test sets \nX_train, X_test, y_train, y_test = train_test_split(train_X, train_y, test_size=0.25, random_state=42)\n\n# define the larger model\ndef larger_model():\n # create model\n model = Sequential()\n model.add(Conv2D(64, (10, 10), input_shape=(1, 64, 64), activation='relu'))\n model.add(MaxPooling2D(pool_size=(2, 2)))\n model.add(Conv2D(32, (5, 5), activation='relu'))\n model.add(MaxPooling2D(pool_size=(2, 2)))\n model.add(Conv2D(16, (3, 3), activation='relu'))\n model.add(MaxPooling2D(pool_size=(2, 2)))\n model.add(Dropout(0.2))\n model.add(Flatten())\n model.add(Dense(256, activation='relu'))\n model.add(Dense(128, activation='relu'))\n model.add(Dense(50, activation='relu'))\n model.add(Dense(num_classes, activation='softmax'))\n # Compile model\n model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])\n return model\n\n# build the model\nmodel = larger_model()\n# Fit the model\nmodel.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=20, batch_size=200)\n# Final evaluation of the model\nscores = model.evaluate(X_test, y_test, verbose=0)\nprint(\"Large CNN Error: %.2f%%\" % (100-scores[1]*100))\n\nnp.savetxt(\"large_4.csv\",model.predict(test_X).argmax(1),delimiter=\",\")\n",
"_____no_output_____"
]
],
[
[
"### 2.1 Making Submissions",
"_____no_output_____"
],
[
"This will be setup as a *Kaggle in class* competition, in which you can upload your system predictions on the test set. You should format your predictions as a csv file, with the same number of lines as the test set, and each line comprising two numbers `id, class` where *id* is the instance number (increasing integers starting from 1) and *class* is an integer between 0-9, corresponding to your system prediction. E.g., \n```\nId,Label\n1,9\n2,9\n3,4\n4,5\n5,1\n...```\nbased on the first five predictions of the system being classes `9 9 4 5 1`. See the `sample_submission.csv` for an example file.\n\nKaggle will report your accuracy on a public portion of the test set, and maintain a leaderboard showing the performance of you and your classmates. You will be allowed to upload up to four submissions each day. At the end of the competition, you should nominate your best submission, which will be scored on the private portion of the test set. The accuracy of your system (i.e., proportion of correctly classified examples) on the private test set will be used for grading your approach.\n\n**Marks will be assigned as follows**:\n - position in the class, where all students are ranked and then the ranks are linearly scaled to <br>0 marks (worst in class) - 4 marks (best in class) \n - absolute performance (4 marks), banded as follows (rounded to nearest integer): \n <br>below 80% = 0 marks; 80-89% = 1; 90-92% = 2; 93-94% = 3; above 95% = 4 marks",
"_____no_output_____"
],
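[
"For reference, a minimal sketch of writing predictions in the required `Id,Label` format (the `np.savetxt` calls above write only the label column, without the id column or header). It assumes the `model` and `test_X` objects defined in the cells above; the filename is an arbitrary choice:\n\n```python\nimport numpy as np\nimport pandas as pd\n\npreds = model.predict(test_X).argmax(1)  # most probable class per test image\nsubmission = pd.DataFrame({'Id': np.arange(1, len(preds) + 1), 'Label': preds})\nsubmission.to_csv('submission.csv', index=False)  # header row: Id,Label\n```",
"_____no_output_____"
],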
[
"Note that you are required to submit your code with this notebook, submitted to the LMS. Failure to provide your implementation may result in assigning zero marks for the competition part, irrespective of the competition standing. Your implementation should be able to exactly reproduce submitted final Kaggle entry, and match your description below.",
"_____no_output_____"
],
[
"### 2.2. Method Description\nDescribe your approach, and justify each of the choices made within your approach. You should write a document with no more than 400 words, as a **PDF** file (not *docx* etc) with up to 2 pages of A4 (2 sides). Text must only appear on the first page, while the second page is for *figures and tables only*. Please use a font size of 11pt or higher. Please consider using `pdflatex` for the report, as it's considerably better for this purpose than wysiwyg document editors. You are encouraged to include empirical results, e.g., a table of results, graphs, or other figures to support your argument. *(this will contribute 9 marks; note that we are looking for clear presentation, sound reasoning, good evaluation and error analysis, as well as general ambition of approach.)*",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown"
]
] |
4a63734bf4ae1799f31efd005ace28261aca9cd3
| 64,497 |
ipynb
|
Jupyter Notebook
|
student_intervention.ipynb
|
abdoulayegk/ml-practice
|
ccd8e23f3fc1a5d2ad2a4c4f485641043e0cce38
|
[
"MIT"
] | null | null | null |
student_intervention.ipynb
|
abdoulayegk/ml-practice
|
ccd8e23f3fc1a5d2ad2a4c4f485641043e0cce38
|
[
"MIT"
] | null | null | null |
student_intervention.ipynb
|
abdoulayegk/ml-practice
|
ccd8e23f3fc1a5d2ad2a4c4f485641043e0cce38
|
[
"MIT"
] | null | null | null | 39.041768 | 1,133 | 0.489604 |
[
[
[
"# Machine Learning Engineer Nanodegree\n## Supervised Learning\n## Project 2: Building a Student Intervention System",
"_____no_output_____"
],
[
"Welcome to the second project of the Machine Learning Engineer Nanodegree! In this notebook, some template code has already been provided for you, and it will be your job to implement the additional functionality necessary to successfully complete this project. Sections that begin with **'Implementation'** in the header indicate that the following block of code will require additional functionality which you must provide. Instructions will be provided for each section and the specifics of the implementation are marked in the code block with a `'TODO'` statement. Please be sure to read the instructions carefully!\n\nIn addition to implementing code, there will be questions that you must answer which relate to the project and your implementation. Each section where you will answer a question is preceded by a **'Question X'** header. Carefully read each question and provide thorough answers in the following text boxes that begin with **'Answer:'**. Your project submission will be evaluated based on your answers to each of the questions and the implementation you provide. \n\n>**Note:** Code and Markdown cells can be executed using the **Shift + Enter** keyboard shortcut. In addition, Markdown cells can be edited by typically double-clicking the cell to enter edit mode.",
"_____no_output_____"
],
[
"### Question 1 - Classification vs. Regression\n*Your goal for this project is to identify students who might need early intervention before they fail to graduate. Which type of supervised learning problem is this, classification or regression? Why?*",
"_____no_output_____"
],
[
"**Answer: ** Classification. It is basically a binary classification problem trying to identify whether or not a student need early intervention. It is not a regression problem trying to estimate some numbers.",
"_____no_output_____"
],
[
"## Exploring the Data\nRun the code cell below to load necessary Python libraries and load the student data. Note that the last column from this dataset, `'passed'`, will be our target label (whether the student graduated or didn't graduate). All other columns are features about each student.",
"_____no_output_____"
]
],
[
[
"# Import libraries\nimport numpy as np\nimport pandas as pd\nfrom time import time\nfrom sklearn.metrics import f1_score\n\n# Read student data\nstudent_data = pd.read_csv(\"student-data.csv\")\nprint(\"Student data read successfully!\")",
"Student data read successfully!\n"
]
],
[
[
"### Implementation: Data Exploration\nLet's begin by investigating the dataset to determine how many students we have information on, and learn about the graduation rate among these students. In the code cell below, you will need to compute the following:\n- The total number of students, `n_students`.\n- The total number of features for each student, `n_features`.\n- The number of those students who passed, `n_passed`.\n- The number of those students who failed, `n_failed`.\n- The graduation rate of the class, `grad_rate`, in percent (%).\n",
"_____no_output_____"
]
],
[
[
"# Preview the data\nstudent_data.head()",
"_____no_output_____"
],
[
"# TODO: Calculate number of students\nn_students = len(student_data)\n\n# TODO: Calculate number of features\nn_features = len(student_data.columns) - 1\n\n# TODO: Calculate passing students\nn_passed = len(student_data[student_data['passed'] == 'yes'])\n\n# TODO: Calculate failing students\nn_failed = len(student_data[student_data['passed'] == 'no'])\n\n# TODO: Calculate graduation rate\ngrad_rate = float(n_passed)/n_students * 100\n\n# Print the results\nprint(\"Total number of students: {}\".format(n_students))\nprint(\"Number of features: {}\".format(n_features))\nprint(\"Number of students who passed: {}\".format(n_passed))\nprint(\"Number of students who failed: {}\".format(n_failed))\nprint(\"Graduation rate of the class: {:.2f}%\".format(grad_rate))",
"Total number of students: 395\nNumber of features: 30\nNumber of students who passed: 265\nNumber of students who failed: 130\nGraduation rate of the class: 67.09%\n"
]
],
[
[
"## Preparing the Data\nIn this section, we will prepare the data for modeling, training and testing.\n\n### Identify feature and target columns\nIt is often the case that the data you obtain contains non-numeric features. This can be a problem, as most machine learning algorithms expect numeric data to perform computations with.\n\nRun the code cell below to separate the student data into feature and target columns to see if any features are non-numeric.",
"_____no_output_____"
]
],
[
[
"# Extract feature columns\nfeature_cols = list(student_data.columns[:-1])\n\n# Extract target column 'passed'\ntarget_col = student_data.columns[-1] \n\n# Show the list of columns\nprint(\"Feature columns:\\n{}\".format(feature_cols))\nprint(\"\\nTarget column: {}\".format(target_col))\n\n# Separate the data into feature data and target data (X_all and y_all, respectively)\nX_all = student_data[feature_cols]\ny_all = student_data[target_col]\n\n# Show the feature information by printing the first five rows\nprint(\"\\nFeature values:\")\nX_all.head()",
"Feature columns:\n['school', 'sex', 'age', 'address', 'famsize', 'Pstatus', 'Medu', 'Fedu', 'Mjob', 'Fjob', 'reason', 'guardian', 'traveltime', 'studytime', 'failures', 'schoolsup', 'famsup', 'paid', 'activities', 'nursery', 'higher', 'internet', 'romantic', 'famrel', 'freetime', 'goout', 'Dalc', 'Walc', 'health', 'absences']\n\nTarget column: passed\n\nFeature values:\n"
]
],
[
[
"### Preprocess Feature Columns\n\nAs you can see, there are several non-numeric columns that need to be converted! Many of them are simply `yes`/`no`, e.g. `internet`. These can be reasonably converted into `1`/`0` (binary) values.\n\nOther columns, like `Mjob` and `Fjob`, have more than two values, and are known as _categorical variables_. The recommended way to handle such a column is to create as many columns as possible values (e.g. `Fjob_teacher`, `Fjob_other`, `Fjob_services`, etc.), and assign a `1` to one of them and `0` to all others.\n\nThese generated columns are sometimes called _dummy variables_, and we will use the [`pandas.get_dummies()`](http://pandas.pydata.org/pandas-docs/stable/generated/pandas.get_dummies.html?highlight=get_dummies#pandas.get_dummies) function to perform this transformation. Run the code cell below to perform the preprocessing routine discussed in this section.",
"_____no_output_____"
]
],
[
[
"def preprocess_features(X):\n ''' Preprocesses the student data and converts non-numeric binary variables into\n binary (0/1) variables. Converts categorical variables into dummy variables. '''\n \n # Initialize new output DataFrame\n output = pd.DataFrame(index = X.index)\n\n # Investigate each feature column for the data\n for col, col_data in X.iteritems():\n \n # If data type is non-numeric, replace all yes/no values with 1/0\n if col_data.dtype == object:\n col_data = col_data.replace(['yes', 'no'], [1, 0])\n\n # If data type is categorical, convert to dummy variables\n if col_data.dtype == object:\n # Example: 'school' => 'school_GP' and 'school_MS'\n col_data = pd.get_dummies(col_data, prefix = col) \n \n # Collect the revised columns\n output = output.join(col_data)\n \n return output\n\nX_all = preprocess_features(X_all)\nprint(\"Processed feature columns ({} total features):\\n{}\"\\\n .format(len(X_all.columns), list(X_all.columns)))",
"Processed feature columns (48 total features):\n['school_GP', 'school_MS', 'sex_F', 'sex_M', 'age', 'address_R', 'address_U', 'famsize_GT3', 'famsize_LE3', 'Pstatus_A', 'Pstatus_T', 'Medu', 'Fedu', 'Mjob_at_home', 'Mjob_health', 'Mjob_other', 'Mjob_services', 'Mjob_teacher', 'Fjob_at_home', 'Fjob_health', 'Fjob_other', 'Fjob_services', 'Fjob_teacher', 'reason_course', 'reason_home', 'reason_other', 'reason_reputation', 'guardian_father', 'guardian_mother', 'guardian_other', 'traveltime', 'studytime', 'failures', 'schoolsup', 'famsup', 'paid', 'activities', 'nursery', 'higher', 'internet', 'romantic', 'famrel', 'freetime', 'goout', 'Dalc', 'Walc', 'health', 'absences']\n"
],
[
"# Preview the preprocessed data\nX_all.head()",
"_____no_output_____"
]
],
[
[
"### Implementation: Training and Testing Data Split\nSo far, we have converted all _categorical_ features into numeric values. For the next step, we split the data (both features and corresponding labels) into training and test sets. In the following code cell below, you will need to implement the following:\n- Randomly shuffle and split the data (`X_all`, `y_all`) into training and testing subsets.\n - Use 300 training points (approximately 75%) and 95 testing points (approximately 25%).\n - Set a `random_state` for the function(s) you use, if provided.\n - Store the results in `X_train`, `X_test`, `y_train`, and `y_test`.",
"_____no_output_____"
]
],
[
[
"# TODO: Import any additional functionality you may need here\n\n# TODO: Set the number of training points\nnum_train = 300\n\n# Set the number of testing points\nnum_test = X_all.shape[0] - num_train\n\n# TODO: Shuffle and split the dataset into the number of training and testing points above\nX_train = X_all.sample(n=num_train, random_state=1)\nX_test = X_all[~X_all.index.isin(X_train.index)]\ny_train = y_all[X_train.index]\ny_test = y_all[~y_all.index.isin(y_train.index)]\n\n# Show the results of the split\nprint(\"Training set has {} samples.\".format(X_train.shape[0]))\nprint(\"Testing set has {} samples.\".format(X_test.shape[0]))",
"Training set has 300 samples.\nTesting set has 95 samples.\n"
]
],
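[
[
"Because the classes are imbalanced (265 passed vs. 130 failed), a stratified split keeps the graduation rate roughly equal in both subsets. A minimal alternative sketch using `train_test_split` (not the split actually used above; the `_s` variable names are only for illustration):\n\n```python\nfrom sklearn.model_selection import train_test_split\n\nX_train_s, X_test_s, y_train_s, y_test_s = train_test_split(\n    X_all, y_all, train_size=300, stratify=y_all, random_state=1)\nprint('Training set has {} samples.'.format(X_train_s.shape[0]))\nprint('Testing set has {} samples.'.format(X_test_s.shape[0]))\n```",
"_____no_output_____"
]
],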
[
[
"## Training and Evaluating Models\nIn this section, you will choose 3 supervised learning models that are appropriate for this problem and available in `scikit-learn`. You will first discuss the reasoning behind choosing these three models by considering what you know about the data and each model's strengths and weaknesses. You will then fit the model to varying sizes of training data (100 data points, 200 data points, and 300 data points) and measure the F<sub>1</sub> score. You will need to produce three tables (one for each model) that shows the training set size, training time, prediction time, F<sub>1</sub> score on the training set, and F<sub>1</sub> score on the testing set.\n\n**The following supervised learning models are currently available in** [`scikit-learn`](http://scikit-learn.org/stable/supervised_learning.html) **that you may choose from:**\n- Gaussian Naive Bayes (GaussianNB)\n- Decision Trees\n- Ensemble Methods (Bagging, AdaBoost, Random Forest, Gradient Boosting)\n- K-Nearest Neighbors (KNeighbors)\n- Stochastic Gradient Descent (SGDC)\n- Support Vector Machines (SVM)\n- Logistic Regression",
"_____no_output_____"
],
[
"### Question 2 - Model Application\n*List three supervised learning models that are appropriate for this problem. For each model chosen*\n- Describe one real-world application in industry where the model can be applied. *(You may need to do a small bit of research for this — give references!)* \n- What are the strengths of the model; when does it perform well? \n- What are the weaknesses of the model; when does it perform poorly?\n- What makes this model a good candidate for the problem, given what you know about the data?",
"_____no_output_____"
],
[
"**Answer: **\n\nDecision Trees\n* It was applied in pharmacology for [drug analysis](https://www.ncbi.nlm.nih.gov/pubmed/8170530).\n* The strength of the Decision Trees is that the model is easy to interprete and understand. It is able to handle both numerical and categorical features very well. Therefore, when the data has numerical and also a lot of categorical features, the Decision Trees model usually can do very well. It is also able to classify linearly-inseparable data.\n* The weakness of the Decision Trees model is that it is very easy to overfit. One should be careful during training the model and apply pruning conditions if necessary. Cross-validation is also useful to prevent overfiting.\n* In our dataset, the data contains both numerical and categorical data. Therefore, applying Decision Trees model might be very useful.\n\nSupport Vector Machines (SVM)\n* It was applied in [facial expression classification](http://cbcl.mit.edu/publications/ps/iccv2001.pdf).\n* The strengh of the Support Vector Machine model is that by using different kernels it is able to separate linear-inseparable data. It maximizes the margin between two classes and is easy to control overfitting. \n* The weakness of the Support Vector Machine model is that the training time and hardware cost becomes extremely high when the training data size becomes larger.\n* For our dateset, the size of dataset is actually small, which limites the training and prediction time of the Support Vector Machine model.\n\n\nGaussian Naive Bayes (GaussianNB)\n* It was applied in [automatic medical diagnosis](http://www.research.ibm.com/people/r/rish/papers/RC22230.pdf).\n* An advantage of naive Bayes is that it only requires a small number of training data to estimate the parameters necessary for classification.\n* The weakness of the model is that the the model assumes all the features are independent to each other, which makes too stringent for most of the data.\n* For our dateset, the size of dataset is actually small, which makes it extremely suitable for the Gaussian Naive Bayes.",
"_____no_output_____"
],
[
"### Setup\nRun the code cell below to initialize three helper functions which you can use for training and testing the three supervised learning models you've chosen above. The functions are as follows:\n- `train_classifier` - takes as input a classifier and training data and fits the classifier to the data.\n- `predict_labels` - takes as input a fit classifier, features, and a target labeling and makes predictions using the F<sub>1</sub> score.\n- `train_predict` - takes as input a classifier, and the training and testing data, and performs `train_clasifier` and `predict_labels`.\n - This function will report the F<sub>1</sub> score for both the training and testing data separately.",
"_____no_output_____"
]
],
[
[
"def train_classifier(clf, X_train, y_train):\n ''' Fits a classifier to the training data. '''\n \n # Start the clock, train the classifier, then stop the clock\n start = time()\n clf.fit(X_train, y_train)\n end = time()\n \n # Print the results\n print(\"Trained model in {:.4f} seconds\".format(end - start))\n \n return float(end - start)\n\n \ndef predict_labels(clf, features, target):\n ''' Makes predictions using a fit classifier based on F1 score. '''\n \n # Start the clock, make predictions, then stop the clock\n start = time()\n y_pred = clf.predict(features)\n end = time()\n \n # Print and return results\n print(\"Made predictions in {:.4f} seconds.\".format(end - start))\n return (float(end - start), f1_score(target.values, y_pred, pos_label='yes'))\n\n\ndef train_predict(clf, X_train, y_train, X_test, y_test):\n ''' Train and predict using a classifer based on F1 score. '''\n \n # Indicate the classifier and the training set size\n print(\"Training a {} using a training set size of {}. . .\".format(clf.__class__.__name__, len(X_train)))\n \n # Train the classifier\n time_training = train_classifier(clf, X_train, y_train)\n \n # Print the results of prediction for both training and testing\n (prediction_time_training, f1_score_training) = predict_labels(clf, X_train, y_train)\n (prediction_time_test, f1_score_test) = predict_labels(clf, X_test, y_test)\n print(\"F1 score for training set: {:.4f}.\".format(f1_score_training))\n print(\"F1 score for test set: {:.4f}.\".format(f1_score_test))\n return (time_training, prediction_time_test, f1_score_training, f1_score_test)",
"_____no_output_____"
]
],
[
[
"### Implementation: Model Performance Metrics\nWith the predefined functions above, you will now import the three supervised learning models of your choice and run the `train_predict` function for each one. Remember that you will need to train and predict on each classifier for three different training set sizes: 100, 200, and 300. Hence, you should expect to have 9 different outputs below — 3 for each model using the varying training set sizes. In the following code cell, you will need to implement the following:\n- Import the three supervised learning models you've discussed in the previous section.\n- Initialize the three models and store them in `clf_A`, `clf_B`, and `clf_C`.\n - Use a `random_state` for each model you use, if provided.\n - **Note:** Use the default settings for each model — you will tune one specific model in a later section.\n- Create the different training set sizes to be used to train each model.\n - *Do not reshuffle and resplit the data! The new training points should be drawn from `X_train` and `y_train`.*\n- Fit each model with each training set size and make predictions on the test set (9 in total). \n**Note:** Three tables are provided after the following code cell which can be used to store your results.",
"_____no_output_____"
]
],
[
[
"# TODO: Import the three supervised learning models from sklearn\nfrom sklearn.tree import DecisionTreeClassifier # Decision Tree\nfrom sklearn.svm import SVC # SVM\nfrom sklearn.naive_bayes import GaussianNB # Naive Bayesian Gaussian\n# from sklearn.neighbors import KNeighborsClassifier # k-NN \n# TODO: Initialize the three models\nclf_A = DecisionTreeClassifier()\nclf_B = SVC()\nclf_C = GaussianNB()\n# clf_C = KNeighborsClassifier()\n\n# TODO: Set up the training set sizes\nX_train_100 = X_train[0:100]\ny_train_100 = y_train[0:100]\n\nX_train_200 = X_train[0:200]\ny_train_200 = y_train[0:200]\n\nX_train_300 = X_train[0:300]\ny_train_300 = y_train[0:300]\n\n# TODO: Execute the 'train_predict' function for each classifier and each training set size\n# train_predict(clf, X_train, y_train, X_test, y_test)\n\nClassifier_1 = pd.DataFrame\\\n(columns=['Training Set Size', 'Training Time', 'Prediction Time (test)', 'F1 Score (train)', 'F1 Score (test)'])\nfor (size, X_training_set, y_training_set) in [(100, X_train_100, y_train_100), (200, X_train_200, y_train_200), (300, X_train_300, y_train_300)]:\n print(size)\n (time_training, prediction_time_test, f1_score_training, f1_score_test) = \\\n train_predict(clf_A, X_training_set, y_training_set, X_test, y_test)\n Classifier_1 = Classifier_1.append({'Training Set Size': size, 'Training Time': round(time_training, 4), \\\n 'Prediction Time (test)': round(prediction_time_test, 4), \\\n 'F1 Score (train)': round(f1_score_training, 4), \\\n 'F1 Score (test)': round(f1_score_test, 4)}, ignore_index=True)\n\nClassifier_2 = pd.DataFrame\\\n(columns=['Training Set Size', 'Training Time', 'Prediction Time (test)', 'F1 Score (train)', 'F1 Score (test)'])\nfor (size, X_training_set, y_training_set) in [(100, X_train_100, y_train_100), (200, X_train_200, y_train_200), (300, X_train_300, y_train_300)]:\n print(size)\n (time_training, prediction_time_test, f1_score_training, f1_score_test) = \\\n train_predict(clf_B, X_training_set, y_training_set, X_test, y_test)\n Classifier_2 = Classifier_2.append({'Training Set Size': size, 'Training Time': round(time_training, 4), \\\n 'Prediction Time (test)': round(prediction_time_test, 4), \\\n 'F1 Score (train)': round(f1_score_training, 4), \\\n 'F1 Score (test)': round(f1_score_test, 4)}, ignore_index=True)\n\nClassifier_3 = pd.DataFrame\\\n(columns=['Training Set Size', 'Training Time', 'Prediction Time (test)', 'F1 Score (train)', 'F1 Score (test)'])\nfor (size, X_training_set, y_training_set) in [(100, X_train_100, y_train_100), (200, X_train_200, y_train_200), (300, X_train_300, y_train_300)]:\n print(size)\n (time_training, prediction_time_test, f1_score_training, f1_score_test) = \\\n train_predict(clf_C, X_training_set, y_training_set, X_test, y_test)\n Classifier_3 = Classifier_3.append({'Training Set Size': size, 'Training Time': round(time_training, 4), \\\n 'Prediction Time (test)': round(prediction_time_test, 4), \\\n 'F1 Score (train)': round(f1_score_training, 4), \\\n 'F1 Score (test)': round(f1_score_test, 4)}, ignore_index=True)\n",
"100\nTraining a DecisionTreeClassifier using a training set size of 100. . .\nTrained model in 0.0039 seconds\nMade predictions in 0.0023 seconds.\nMade predictions in 0.0019 seconds.\nF1 score for training set: 1.0000.\nF1 score for test set: 0.7395.\n200\nTraining a DecisionTreeClassifier using a training set size of 200. . .\nTrained model in 0.0035 seconds\nMade predictions in 0.0020 seconds.\nMade predictions in 0.0019 seconds.\nF1 score for training set: 1.0000.\nF1 score for test set: 0.6891.\n300\nTraining a DecisionTreeClassifier using a training set size of 300. . .\nTrained model in 0.0040 seconds\nMade predictions in 0.0020 seconds.\nMade predictions in 0.0019 seconds.\nF1 score for training set: 1.0000.\nF1 score for test set: 0.7656.\n100\nTraining a SVC using a training set size of 100. . .\nTrained model in 0.0050 seconds\nMade predictions in 0.0032 seconds.\nMade predictions in 0.0025 seconds.\nF1 score for training set: 0.8214.\nF1 score for test set: 0.7843.\n200\nTraining a SVC using a training set size of 200. . .\nTrained model in 0.0049 seconds\nMade predictions in 0.0045 seconds.\nMade predictions in 0.0030 seconds.\nF1 score for training set: 0.8168.\nF1 score for test set: 0.7947.\n300\nTraining a SVC using a training set size of 300. . .\nTrained model in 0.0078 seconds\nMade predictions in 0.0071 seconds.\nMade predictions in 0.0034 seconds.\nF1 score for training set: 0.8095.\nF1 score for test set: 0.7821.\n100\nTraining a GaussianNB using a training set size of 100. . .\nTrained model in 0.0028 seconds\nMade predictions in 0.0020 seconds.\nMade predictions in 0.0020 seconds.\nF1 score for training set: 0.1842.\nF1 score for test set: 0.4048.\n200\nTraining a GaussianNB using a training set size of 200. . .\nTrained model in 0.0034 seconds\nMade predictions in 0.0022 seconds.\nMade predictions in 0.0022 seconds.\nF1 score for training set: 0.8129.\nF1 score for test set: 0.7619.\n300\nTraining a GaussianNB using a training set size of 300. . .\nTrained model in 0.0030 seconds\nMade predictions in 0.0023 seconds.\nMade predictions in 0.0020 seconds.\nF1 score for training set: 0.8029.\nF1 score for test set: 0.7619.\n"
]
],
[
[
"### Tabular Results\nEdit the cell below to see how a table can be designed in [Markdown](https://github.com/adam-p/markdown-here/wiki/Markdown-Cheatsheet#tables). You can record your results from above in the tables provided.",
"_____no_output_____"
],
[
"Classifier_1",
"_____no_output_____"
]
],
[
[
"# Decision Tree\nClassifier_1",
"_____no_output_____"
]
],
[
[
"Classifier_2",
"_____no_output_____"
]
],
[
[
"# Support Vector Machine\nClassifier_2",
"_____no_output_____"
]
],
[
[
"Classifier_3",
"_____no_output_____"
]
],
[
[
"# Naive Bayesian Gaussian\nClassifier_3",
"_____no_output_____"
]
],
[
[
"## Choosing the Best Model\nIn this final section, you will choose from the three supervised learning models the *best* model to use on the student data. You will then perform a grid search optimization for the model over the entire training set (`X_train` and `y_train`) by tuning at least one parameter to improve upon the untuned model's F<sub>1</sub> score. ",
"_____no_output_____"
],
[
"### Question 3 - Choosing the Best Model\n*Based on the experiments you performed earlier, in one to two paragraphs, explain to the board of supervisors what single model you chose as the best model. Which model is generally the most appropriate based on the available data, limited resources, cost, and performance?*",
"_____no_output_____"
],
[
"**Answer: ** \n\nI think the Support Vector Machine model is the best and the most appropriate model given our dataset, computational cost and model performance. Regarding the training time, the differences between the three models are negaligible. It should be noted that usually the training time of the Support Vector Machine model increases as the dataset size increases. However, because our dataset size is quite small, the training time did not reflect a defect of our model. Similarly, the prediction time differences between the three models are also negaligible. Regarding the F1_scores (training) of the three model, F1_score (training) equals 1.0 in all three trainings in the Decision Tree model, which is certainly a reflection of overfitting. F1_score (training) is bad in the Naive Bayesian model with training set size of 100, because the training set size is too small and the features are too many. Overall, the Support Vector Machine model showed the better F1_score (training). In addition, the Support Vector Machine model also showed the best F1_score (test), suggesting better generalization over the other two models.\n",
"_____no_output_____"
],
[
"### Question 4 - Model in Layman's Terms\n*In one to two paragraphs, explain to the board of directors in layman's terms how the final model chosen is supposed to work. Be sure that you are describing the major qualities of the model, such as how the model is trained and how the model makes a prediction. Avoid using advanced mathematical or technical jargon, such as describing equations or discussing the algorithm implementation.*",
"_____no_output_____"
],
[
"**Answer: ** Basically with the labeled training data Support Vector Machine creates a hyperplane that separates the labeled data. Each side of the hyperplane represents a class of data. This hyperplane also maximize the distances to its nearest training data of any class, representing lower generalization error. To predict the label of new data, simply calculate which side of hyperplane the new data is in. ",
"_____no_output_____"
],
[
"### Implementation: Model Tuning\nFine tune the chosen model. Use grid search (`GridSearchCV`) with at least one important parameter tuned with at least 3 different values. You will need to use the entire training set for this. In the code cell below, you will need to implement the following:\n- Import [`https://scikit-learn.org/stable/modules/generated/sklearn.model_selection.GridSearchCV.html`](http://scikit-learn.org/stable/modules/generated/sklearn.grid_search.GridSearchCV.html) and [`sklearn.metrics.make_scorer`](http://scikit-learn.org/stable/modules/generated/sklearn.metrics.make_scorer.html).\n- Create a dictionary of parameters you wish to tune for the chosen model.\n - Example: `parameters = {'parameter' : [list of values]}`.\n- Initialize the classifier you've chosen and store it in `clf`.\n- Create the F<sub>1</sub> scoring function using `make_scorer` and store it in `f1_scorer`.\n - Set the `pos_label` parameter to the correct value!\n- Perform grid search on the classifier `clf` using `f1_scorer` as the scoring method, and store it in `grid_obj`.\n- Fit the grid search object to the training data (`X_train`, `y_train`), and store it in `grid_obj`.",
"_____no_output_____"
]
],
[
[
"# TODO: Import 'GridSearchCV' and 'make_scorer'\nfrom sklearn.model_selection import GridSearchCV\nfrom sklearn.metrics import make_scorer\nfrom sklearn.metrics import f1_score\n\n# TODO: Create the parameters list you wish to tune\nparameters = {'C': [0.1, 1, 10], 'kernel': ['linear', 'poly', 'rbf']}\n\n# TODO: Initialize the classifier\nclf = SVC()\n\n# TODO: Make an f1 scoring function using 'make_scorer' \nf1_scorer = make_scorer(f1_score, pos_label='yes')\n\n# TODO: Perform grid search on the classifier using the f1_scorer as the scoring method\ngrid_obj = GridSearchCV(estimator = clf, param_grid = parameters, scoring = f1_scorer)\n\n# TODO: Fit the grid search object to the training data and find the optimal parameters\ngrid_obj.fit(X_train, y_train)\n\n# Get the estimator\nclf = grid_obj.best_estimator_\n\n# Report the final F1 score for training and testing after parameter tuning\nprint(\"Tuned model has a training F1 score of {:.4f}.\".format(predict_labels(clf, X_train, y_train)[1]))\nprint(\"Tuned model has a testing F1 score of {:.4f}.\".format(predict_labels(clf, X_test, y_test)[1]))",
"Made predictions in 0.0067 seconds.\nTuned model has a training F1 score of 0.8540.\nMade predictions in 0.0037 seconds.\nTuned model has a testing F1 score of 0.7917.\n"
]
],
[
[
"### Question 5 - Final F<sub>1</sub> Score\n*What is the final model's F<sub>1</sub> score for training and testing? How does that score compare to the untuned model?*",
"_____no_output_____"
],
[
"**Answer: ** The F1 score for training and testing of optimized Support Vector Machine model are 0.8540 and 0.8085, respectively. The F1 score for training is lower than the untuned model while the F1 score for testing is higher than the untuned model. It is probably because the optimized Support Vector Machine used a different kernel compared to the untuned one so that the F1 score for training is lower. However, because the GridSearchCV funtion internally used cross-validation technique therefore the F1 score for testing is higher than the untuned one.",
"_____no_output_____"
],
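[
"Which kernel and `C` the grid search actually selected can be read off the fitted search object; a short sketch, assuming `grid_obj` from the tuning cell above:\n\n```python\nprint(grid_obj.best_params_)  # e.g. which of 'linear'/'poly'/'rbf' and which C was chosen\nprint(grid_obj.best_score_)   # mean cross-validated F1 score of that setting\n```",
"_____no_output_____"
],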
[
"> **Note**: Once you have completed all of the code implementations and successfully answered each question above, you may finalize your work by exporting the iPython Notebook as an HTML document. You can do this by using the menu above and navigating to \n**File -> Download as -> HTML (.html)**. Include the finished document along with this notebook as your submission.",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
]
] |
4a6373aff4053de8cfd8413e5e4803797a09ed10
| 95,793 |
ipynb
|
Jupyter Notebook
|
Autoenconder.ipynb
|
JoanesMiranda/Machine-learning
|
86aae95c4daf384f96c5ac5852a1923160661ff5
|
[
"MIT"
] | 1 |
2020-06-13T14:58:15.000Z
|
2020-06-13T14:58:15.000Z
|
Autoenconder.ipynb
|
JoanesMiranda/Machine-learning
|
86aae95c4daf384f96c5ac5852a1923160661ff5
|
[
"MIT"
] | null | null | null |
Autoenconder.ipynb
|
JoanesMiranda/Machine-learning
|
86aae95c4daf384f96c5ac5852a1923160661ff5
|
[
"MIT"
] | null | null | null | 104.122826 | 45,402 | 0.773814 |
[
[
[
"<a href=\"https://colab.research.google.com/github/JoanesMiranda/Machine-learning/blob/master/Autoenconder.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>",
"_____no_output_____"
],
[
"### Importando as bibliotecas necessárias",
"_____no_output_____"
]
],
[
[
"import numpy as np\nimport matplotlib.pyplot as plt\nimport tensorflow as tf\nfrom tensorflow.keras.datasets import mnist",
"_____no_output_____"
]
],
[
[
"### Carregando a base de dados",
"_____no_output_____"
]
],
[
[
"(x_train, y_train),(x_test, y_test) = mnist.load_data()",
"_____no_output_____"
]
],
[
[
"### Plotando uma amostra das imagens",
"_____no_output_____"
]
],
[
[
"plt.imshow(x_train[10], cmap=\"gray\")",
"_____no_output_____"
]
],
[
[
"### Aplicando normalização nos dados de treino e teste",
"_____no_output_____"
]
],
[
[
"x_train = x_train / 255.0\nx_test = x_test / 255.0\n\nprint(x_train.shape)\nprint(x_test.shape)",
"(60000, 28, 28)\n(10000, 28, 28)\n"
]
],
[
[
"### Adicionando ruido a base de treino",
"_____no_output_____"
]
],
[
[
"noise = 0.3\nnoise_x_train = []\n\nfor img in x_train:\n noisy_image = img + noise * np.random.randn(*img.shape)\n noisy_image = np.clip(noisy_image, 0., 1.)\n noise_x_train.append(noisy_image)\n\nnoise_x_train = np.array(noise_x_train)\nprint(noise_x_train.shape)",
"(60000, 28, 28)\n"
]
],
[
[
"### Plotando uma amostra da imagem com o ruido aplicado",
"_____no_output_____"
]
],
[
[
"plt.imshow(noise_x_train[10], cmap=\"gray\")",
"_____no_output_____"
]
],
[
[
"### Adicionando ruido a base de teste",
"_____no_output_____"
]
],
[
[
"noise = 0.3\nnoise_x_test = []\n\nfor img in x_train:\n noisy_image = img + noise * np.random.randn(*img.shape)\n noisy_image = np.clip(noisy_image, 0., 1.)\n noise_x_test.append(noisy_image)\n\nnoise_x_test = np.array(noise_x_test)\nprint(noise_x_test.shape)",
"(60000, 28, 28)\n"
]
],
[
[
"### Plotando uma amostra da imagem com o ruido aplicado",
"_____no_output_____"
]
],
[
[
"plt.imshow(noise_x_test[10], cmap=\"gray\")\n",
"_____no_output_____"
],
[
"noise_x_train = np.reshape(noise_x_train,(-1, 28, 28, 1))\nnoise_x_test = np.reshape(noise_x_test,(-1, 28, 28, 1))\n\nprint(noise_x_train.shape)\nprint(noise_x_test.shape)",
"(60000, 28, 28, 1)\n(60000, 28, 28, 1)\n"
]
],
[
[
"### Autoencoder",
"_____no_output_____"
]
],
[
[
"x_input = tf.keras.layers.Input((28,28,1))\n\n# encoder\nx = tf.keras.layers.Conv2D(filters=16, kernel_size=3, strides=2, padding='same')(x_input)\nx = tf.keras.layers.Conv2D(filters=8, kernel_size=3, strides=2, padding='same')(x)\n\n# decoder\nx = tf.keras.layers.Conv2DTranspose(filters=16, kernel_size=3, strides=2, padding='same')(x)\nx = tf.keras.layers.Conv2DTranspose(filters=1, kernel_size=3, strides=2, activation='sigmoid', padding='same')(x)\n\nmodel = tf.keras.models.Model(inputs=x_input, outputs=x)\nmodel.compile(loss='binary_crossentropy', optimizer=tf.keras.optimizers.Adam(lr=0.001))\n\nmodel.summary()",
"Model: \"functional_15\"\n_________________________________________________________________\nLayer (type) Output Shape Param # \n=================================================================\ninput_8 (InputLayer) [(None, 28, 28, 1)] 0 \n_________________________________________________________________\nconv2d_14 (Conv2D) (None, 14, 14, 16) 160 \n_________________________________________________________________\nconv2d_15 (Conv2D) (None, 7, 7, 8) 1160 \n_________________________________________________________________\nconv2d_transpose_14 (Conv2DT (None, 14, 14, 16) 1168 \n_________________________________________________________________\nconv2d_transpose_15 (Conv2DT (None, 28, 28, 1) 145 \n=================================================================\nTotal params: 2,633\nTrainable params: 2,633\nNon-trainable params: 0\n_________________________________________________________________\n"
]
],
[
[
"### Treinando os dados",
"_____no_output_____"
]
],
[
[
"model.fit(noise_x_train, x_train, batch_size=100, validation_split=0.1, epochs=10)",
"Epoch 1/10\n540/540 [==============================] - 3s 5ms/step - loss: 0.1805 - val_loss: 0.0944\nEpoch 2/10\n540/540 [==============================] - 3s 5ms/step - loss: 0.0898 - val_loss: 0.0884\nEpoch 3/10\n540/540 [==============================] - 3s 5ms/step - loss: 0.0869 - val_loss: 0.0871\nEpoch 4/10\n540/540 [==============================] - 3s 5ms/step - loss: 0.0862 - val_loss: 0.0866\nEpoch 5/10\n540/540 [==============================] - 3s 5ms/step - loss: 0.0858 - val_loss: 0.0863\nEpoch 6/10\n540/540 [==============================] - 3s 5ms/step - loss: 0.0857 - val_loss: 0.0862\nEpoch 7/10\n540/540 [==============================] - 3s 5ms/step - loss: 0.0856 - val_loss: 0.0861\nEpoch 8/10\n540/540 [==============================] - 3s 5ms/step - loss: 0.0855 - val_loss: 0.0861\nEpoch 9/10\n540/540 [==============================] - 3s 5ms/step - loss: 0.0855 - val_loss: 0.0862\nEpoch 10/10\n540/540 [==============================] - 3s 5ms/step - loss: 0.0855 - val_loss: 0.0860\n"
]
],
[
[
"### Realizando a predição das imagens usando os dados de teste com o ruido aplicado",
"_____no_output_____"
]
],
[
[
"predicted = model.predict(noise_x_test)\npredicted",
"_____no_output_____"
]
],
[
[
"### Plotando as imagens com ruido e depois de aplicar o autoencoder",
"_____no_output_____"
]
],
[
[
"fig, axes = plt.subplots(nrows=2, ncols=10, sharex=True, sharey=True, figsize=(20,4))\nfor images, row in zip([noise_x_test[:10], predicted], axes):\n for img, ax in zip(images, row):\n ax.imshow(img.reshape((28, 28)), cmap='Greys_r')\n ax.get_xaxis().set_visible(False)\n ax.get_yaxis().set_visible(False)\n ",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a63798d34be69493a47e00838672f29dc2dc285
| 278,306 |
ipynb
|
Jupyter Notebook
|
SegmentingandClusteringPart3.ipynb
|
NeilMascarenhas/Coursera_Capstone
|
c094bc83ecc40898b4d96d02a7077c02eba887a7
|
[
"MIT"
] | null | null | null |
SegmentingandClusteringPart3.ipynb
|
NeilMascarenhas/Coursera_Capstone
|
c094bc83ecc40898b4d96d02a7077c02eba887a7
|
[
"MIT"
] | null | null | null |
SegmentingandClusteringPart3.ipynb
|
NeilMascarenhas/Coursera_Capstone
|
c094bc83ecc40898b4d96d02a7077c02eba887a7
|
[
"MIT"
] | 16 |
2020-04-13T21:15:59.000Z
|
2021-07-11T12:13:57.000Z
| 579.804167 | 60,219 | 0.721767 |
[
[
[
"## Peer-graded Assignment: Segmenting and Clustering Neighborhoods in Toronto",
"_____no_output_____"
],
[
"# Part 3",
"_____no_output_____"
]
],
[
[
"import pandas as pd\nimport numpy as np\nimport json\n\n!conda install -c conda-forge geopy --yes\nfrom geopy.geocoders import Nominatim\n\nimport requests\nfrom pandas.io.json import json_normalize\n\nimport matplotlib.cm as cm\nimport matplotlib.colors as colors\n\nfrom sklearn.cluster import KMeans\n\n!conda install -c conda-forge folium=0.5.0 --yes\nimport folium",
"Solving environment: done\n\n## Package Plan ##\n\n environment location: /opt/conda/envs/Python36\n\n added / updated specs: \n - geopy\n\n\nThe following packages will be downloaded:\n\n package | build\n ---------------------------|-----------------\n openssl-1.1.1d | h516909a_0 2.1 MB conda-forge\n geographiclib-1.50 | py_0 34 KB conda-forge\n geopy-1.21.0 | py_0 58 KB conda-forge\n ca-certificates-2019.11.28 | hecc5488_0 145 KB conda-forge\n certifi-2019.11.28 | py36_0 149 KB conda-forge\n ------------------------------------------------------------\n Total: 2.5 MB\n\nThe following NEW packages will be INSTALLED:\n\n geographiclib: 1.50-py_0 conda-forge\n geopy: 1.21.0-py_0 conda-forge\n\nThe following packages will be UPDATED:\n\n ca-certificates: 2019.11.27-0 --> 2019.11.28-hecc5488_0 conda-forge\n certifi: 2019.11.28-py36_0 --> 2019.11.28-py36_0 conda-forge\n\nThe following packages will be DOWNGRADED:\n\n openssl: 1.1.1d-h7b6447c_3 --> 1.1.1d-h516909a_0 conda-forge\n\n\nDownloading and Extracting Packages\nopenssl-1.1.1d | 2.1 MB | ##################################### | 100% \ngeographiclib-1.50 | 34 KB | ##################################### | 100% \ngeopy-1.21.0 | 58 KB | ##################################### | 100% \nca-certificates-2019 | 145 KB | ##################################### | 100% \ncertifi-2019.11.28 | 149 KB | ##################################### | 100% \nPreparing transaction: done\nVerifying transaction: done\nExecuting transaction: done\nSolving environment: done\n\n## Package Plan ##\n\n environment location: /opt/conda/envs/Python36\n\n added / updated specs: \n - folium=0.5.0\n\n\nThe following packages will be downloaded:\n\n package | build\n ---------------------------|-----------------\n altair-4.0.1 | py_0 575 KB conda-forge\n folium-0.5.0 | py_0 45 KB conda-forge\n branca-0.3.1 | py_0 25 KB conda-forge\n vincent-0.4.4 | py_1 28 KB conda-forge\n ------------------------------------------------------------\n Total: 673 KB\n\nThe following NEW packages will be INSTALLED:\n\n altair: 4.0.1-py_0 conda-forge\n branca: 0.3.1-py_0 conda-forge\n folium: 0.5.0-py_0 conda-forge\n vincent: 0.4.4-py_1 conda-forge\n\n\nDownloading and Extracting Packages\naltair-4.0.1 | 575 KB | ##################################### | 100% \nfolium-0.5.0 | 45 KB | ##################################### | 100% \nbranca-0.3.1 | 25 KB | ##################################### | 100% \nvincent-0.4.4 | 28 KB | ##################################### | 100% \nPreparing transaction: done\nVerifying transaction: done\nExecuting transaction: done\n"
]
],
[
[
"### Get the dataframe from first task",
"_____no_output_____"
]
],
[
[
"# read data from html\nurl = 'https://en.wikipedia.org/wiki/List_of_postal_codes_of_Canada:_M'\nread_table = pd.read_html(url,header=[0])\ndf1 = read_table[0]\n\n# rename columns' name\ndf1 = df1.rename(index=str, columns={'Postcode':'PostalCode','Neighbourhood':'Neighborhood'})\n\n# Ignore cells with a borough that is Not assigned\ndf1 = df1[df1.Borough !='Not assigned']\ndf1.reset_index(drop=True,inplace=True)\n\n# groupby\ndf1 = df1.groupby('PostalCode',as_index=False).agg(lambda x: ','.join(set(x.dropna())))\n\n# If a cell has a borough but a Not assigned neighborhood, \n#then the neighborhood will be the same as the borough\ndf1.loc[df1['Neighborhood'] == 'Not assigned','Neighborhood'] = df1['Borough']\n\ndf1.head()",
"_____no_output_____"
]
],
[
[
"### Get the dataframe from Second task",
"_____no_output_____"
]
],
[
[
"df2 = pd.read_csv('http://cocl.us/Geospatial_data')\n\n# Change the columns' name\ndf2.columns = ['PostalCode','Latitude','Longitude']\n\n# Merge two dataframes\ndf = pd.merge(left=df1,right=df2,on='PostalCode')\ndf.head()",
"_____no_output_____"
]
],
[
[
"# Part 3 ",
"_____no_output_____"
],
[
"### Get Toronto data",
"_____no_output_____"
]
],
[
[
"Toronto = df[df['Borough'].str.contains('Toronto')].reset_index(drop=True)\nToronto.head()",
"_____no_output_____"
]
],
[
[
"### Get the latitude and longitude values of Toronto",
"_____no_output_____"
]
],
[
[
"address = 'Toronto'\ngeolocator = Nominatim()\nlocation = geolocator.geocode(address)\nlatitude = location.latitude\nlongitude = location.longitude\nprint('The geograpical coordinate of Toronto are{},{}.'.format(latitude,longitude))",
"The geograpical coordinate of Toronto are43.653963,-79.387207.\n"
]
],
[
[
"### Create map of Toronto",
"_____no_output_____"
]
],
[
[
"# create map of Toronto using latitude and longitude values\nmap_Toronto = folium.Map(location=[latitude, longitude], zoom_start=11)\n\n# add markers to map\nfor lat, lng, label in zip(Toronto['Latitude'], Toronto['Longitude'], Toronto['Neighborhood']):\n label = folium.Popup(label, parse_html=True)\n folium.CircleMarker(\n [lat, lng],\n radius=5,\n popup=label,\n color='blue',\n fill=True,\n fill_color='#3186cc',\n fill_opacity=0.7,\n parse_html=False).add_to(map_Toronto) \n \nmap_Toronto",
"_____no_output_____"
]
],
[
[
"### Define Foursquare Credentials and Version",
"_____no_output_____"
]
],
[
[
"CLIENT_ID = 'HCIWEYMLE0SJAI3ESV4AFX5PNQVBSLP5HQ1YU4GISAHHRIFV' # your Foursquare ID\nCLIENT_SECRET = 'P4KVBEVJDIVREULUPIZHUL124JX353PUIP5KWJOGX1PLDB5B' # your Foursquare Secret\nVERSION = '20200202' # Foursquare API version",
"_____no_output_____"
]
],
[
[
"### Try to explore other neighborhood\n#### pick up Etobicoke to be explored",
"_____no_output_____"
]
],
[
[
"Etobicoke = df[df['Borough']=='Etobicoke'].reset_index(drop=True)\n\n# get the Etobicoke latitude and longitude values\nEtobicoke_latitude = Etobicoke.loc[0,'Latitude']\nEtobicoke_longitude = Etobicoke.loc[0,'Longitude']\n\nprint('Latitude and longitude values of Etobicoke are {},{}.'.format(\n Etobicoke_latitude, Etobicoke_longitude))",
"Latitude and longitude values of Etobicoke are 43.6056466,-79.50132070000001.\n"
],
[
"map_Etobicoke = folium.Map(location=[latitude, longitude], zoom_start=11)\n\n# add markers to map\nfor lat, lng, label in zip(Etobicoke['Latitude'], Etobicoke['Longitude'], Etobicoke['Neighborhood']):\n label = folium.Popup(label, parse_html=True)\n folium.CircleMarker(\n [lat, lng],\n radius=5,\n popup=label,\n color='blue',\n fill=True,\n fill_color='#3186cc',\n fill_opacity=0.7,\n parse_html=False).add_to(map_Etobicoke) \n \nmap_Etobicoke",
"_____no_output_____"
]
],
[
[
"### Explore Neighborhoods in Toronto\n#### Getting the top 100 venues that are in Toronto within a raidus of 500 meters\n",
"_____no_output_____"
]
],
[
[
"LIMIT = 100\nradius = 500\n\ndef getNearbyVenues(names, latitudes, longitudes, radius=500):\n \n venues_list=[]\n for name, lat, lng in zip(names, latitudes, longitudes):\n print(name)\n \n # create the API request URL\n url = 'https://api.foursquare.com/v2/venues/explore?&client_id={}&client_secret={}&v={}&ll={},{}&radius={}&limit={}'.format(\n CLIENT_ID, \n CLIENT_SECRET, \n VERSION, \n lat, \n lng, \n radius, \n LIMIT)\n \n # make the GET request\n results = requests.get(url).json()[\"response\"]['groups'][0]['items']\n \n # return only relevant information for each nearby venue\n venues_list.append([(\n name, \n lat, \n lng, \n v['venue']['name'], \n v['venue']['location']['lat'], \n v['venue']['location']['lng'], \n v['venue']['categories'][0]['name']) for v in results])\n\n nearby_venues = pd.DataFrame([item for venue_list in venues_list for item in venue_list])\n nearby_venues.columns = ['Neighborhood', \n 'Neighborhood Latitude', \n 'Neighborhood Longitude', \n 'Venue', \n 'Venue Latitude', \n 'Venue Longitude', \n 'Venue Category']\n \n return(nearby_venues)\n\n\n# write the code to run the above function\nToronto_venues = getNearbyVenues(names=Toronto['Neighborhood'],\n latitudes=Toronto['Latitude'],\n longitudes=Toronto['Longitude'])\n",
"The Beaches\nThe Danforth West,Riverdale\nThe Beaches West,India Bazaar\nStudio District\nLawrence Park\nDavisville North\nNorth Toronto West\nDavisville\nMoore Park,Summerhill East\nRathnelly,Summerhill West,South Hill,Deer Park,Forest Hill SE\nRosedale\nCabbagetown,St. James Town\nChurch and Wellesley\nHarbourfront\nGarden District,Ryerson\nSt. James Town\nBerczy Park\nCentral Bay Street\nRichmond,Adelaide,King\nHarbourfront East,Union Station,Toronto Islands\nDesign Exchange,Toronto Dominion Centre\nCommerce Court,Victoria Hotel\nRoselawn\nForest Hill West,Forest Hill North\nYorkville,North Midtown,The Annex\nUniversity of Toronto,Harbord\nChinatown,Kensington Market,Grange Park\nSouth Niagara,CN Tower,Island airport,Bathurst Quay,Harbourfront West,Railway Lands,King and Spadina\nStn A PO Boxes 25 The Esplanade\nFirst Canadian Place,Underground city\nChristie\nDovercourt Village,Dufferin\nTrinity,Little Portugal\nParkdale Village,Brockton,Exhibition Place\nHigh Park,The Junction South\nRoncesvalles,Parkdale\nRunnymede,Swansea\nQueen's Park\nBusiness Reply Mail Processing Centre 969 Eastern\n"
]
],
[
[
"### Check the size of the resulting dataframe",
"_____no_output_____"
]
],
[
[
"print(Toronto_venues.shape)\nToronto_venues.head()",
"(1706, 7)\n"
]
],
[
[
"### Check how many venues were returned",
"_____no_output_____"
]
],
[
[
"Toronto_venues.groupby('Neighborhood').count()",
"_____no_output_____"
]
],
[
[
"### Find out how many unique categories can be curated from all the returned venues\n",
"_____no_output_____"
]
],
[
[
"print('There are {} uniques categories.'.format(len(Toronto_venues['Venue Category'].unique())))",
"There are 229 uniques categories.\n"
]
],
[
[
"### Analyze Neighborhood",
"_____no_output_____"
]
],
[
[
"# one hot encoding\nToronto_onehot = pd.get_dummies(Toronto_venues[['Venue Category']], prefix=\"\", prefix_sep=\"\")\n\n# add neighborhood column back to dataframe\nToronto_onehot['Neighborhood'] = Toronto_venues['Neighborhood'] \n\n# move neighborhood column to the first column\nfixed_columns = [Toronto_onehot.columns[-1]] + list(Toronto_onehot.columns[:-1])\nToronto_onehot = Toronto_onehot[fixed_columns]\n\nToronto_onehot.head()",
"_____no_output_____"
]
],
[
[
"### New dataframe shape",
"_____no_output_____"
]
],
[
[
"Toronto_onehot.shape",
"_____no_output_____"
]
],
[
[
"### Group rows by neigoborhood and by taking the mean of the frequency of occurrence of each category",
"_____no_output_____"
]
],
[
[
"Toronto_grouped = Toronto_onehot.groupby('Neighborhood').mean().reset_index()\nToronto_grouped",
"_____no_output_____"
]
],
[
[
"### New size after grouping",
"_____no_output_____"
]
],
[
[
"# new size\nToronto_grouped.shape",
"_____no_output_____"
]
],
[
[
"### Pick up top 10 venues",
"_____no_output_____"
]
],
[
[
"num_top_venues = 10\n\n# write a funcition to sort the venues in descending order\ndef return_most_common_venues(row, num_top_venues):\n row_categories = row.iloc[1:]\n row_categories_sorted = row_categories.sort_values(ascending=False) \n return row_categories_sorted.index.values[0:num_top_venues]\n\nindicators = ['st', 'nd', 'rd']\n\n# create columns according to number of top venues\ncolumns = ['Neighborhood']\nfor ind in np.arange(num_top_venues):\n try:\n columns.append('{}{} Most Common Venue'.format(ind+1, indicators[ind]))\n except:\n columns.append('{}th Most Common Venue'.format(ind+1))\n\n# create a new dataframe\nToronto_venues_sorted = pd.DataFrame(columns=columns)\nToronto_venues_sorted['Neighborhood'] = Toronto_grouped['Neighborhood']\n\nfor ind in np.arange(Toronto_grouped.shape[0]):\n Toronto_venues_sorted.iloc[ind, 1:] = return_most_common_venues(Toronto_grouped.iloc[ind, :], num_top_venues)\n\nToronto_venues_sorted",
"_____no_output_____"
]
],
[
[
"### Cluster Neighborhoods\nRun k-means to cluster the neighborhood into 5 clusters.",
"_____no_output_____"
]
],
[
[
"# set number of clusters\nkclusters = 5\n\nToronto_grouped_clustering = Toronto_grouped.drop('Neighborhood', 1)\n\n# run k-means clustering\nkmeans = KMeans(n_clusters=kclusters, random_state=0).fit(Toronto_grouped_clustering)\n\n# check cluster labels generated for each row in the dataframe\nkmeans.labels_[0:10]",
"_____no_output_____"
],
[
"Toronto_merged = Toronto\n\n# add clustering labels\nToronto_merged['Cluster Labels'] = kmeans.labels_\n\n# merge toronto_grouped with toronto_data to add latitude/longitude for each neighborhood\nToronto_merged = Toronto_merged.join(Toronto_venues_sorted.set_index('Neighborhood'), on='Neighborhood')\n\nToronto_merged.head() # check the last columns!",
"_____no_output_____"
]
],
[
[
"### Visualize the resulting clusters",
"_____no_output_____"
]
],
[
[
"# create map\nmap_clusters = folium.Map(location=[latitude, longitude], zoom_start=11)\n\n# set color scheme for the clusters\nx = np.arange(kclusters)\nys = [i+x+(i*x)**2 for i in range(kclusters)]\ncolors_array = cm.rainbow(np.linspace(0, 1, len(ys)))\nrainbow = [colors.rgb2hex(i) for i in colors_array]\n\n# add markers to the map\nmarkers_colors = []\nfor lat, lon, poi, cluster in zip(Toronto_merged['Latitude'], Toronto_merged['Longitude'], \n Toronto_merged['Neighborhood'], \n Toronto_merged['Cluster Labels']):\n label = folium.Popup(str(poi) + ' Cluster ' + str(cluster), parse_html=True)\n folium.CircleMarker(\n [lat, lon],\n radius=5,\n popup=label,\n color=rainbow[cluster-1],\n fill=True,\n fill_color=rainbow[cluster-1],\n fill_opacity=0.7).add_to(map_clusters)\n \nmap_clusters",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a63a1c71ca9a2d6fad9181f44706d2938cae18f
| 10,017 |
ipynb
|
Jupyter Notebook
|
t81_558_class_05_3_keras_l1_l2.ipynb
|
akramsystems/t81_558_deep_learning
|
a025888f0f609402746b98e40abc9d1ed15c5e87
|
[
"Apache-2.0"
] | 1 |
2020-03-08T12:54:45.000Z
|
2020-03-08T12:54:45.000Z
|
t81_558_class_05_3_keras_l1_l2.ipynb
|
KaShel06/t81_558_deep_learning
|
8beec2fdd305dd7e8937ac9da9e7c59f5fc4bb78
|
[
"Apache-2.0"
] | null | null | null |
t81_558_class_05_3_keras_l1_l2.ipynb
|
KaShel06/t81_558_deep_learning
|
8beec2fdd305dd7e8937ac9da9e7c59f5fc4bb78
|
[
"Apache-2.0"
] | 1 |
2021-07-21T17:58:31.000Z
|
2021-07-21T17:58:31.000Z
| 39.282353 | 491 | 0.611061 |
[
[
[
"<a href=\"https://colab.research.google.com/github/jeffheaton/t81_558_deep_learning/blob/master/t81_558_class_05_3_keras_l1_l2.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>",
"_____no_output_____"
],
[
"# T81-558: Applications of Deep Neural Networks\n**Module 5: Regularization and Dropout**\n* Instructor: [Jeff Heaton](https://sites.wustl.edu/jeffheaton/), McKelvey School of Engineering, [Washington University in St. Louis](https://engineering.wustl.edu/Programs/Pages/default.aspx)\n* For more information visit the [class website](https://sites.wustl.edu/jeffheaton/t81-558/).",
"_____no_output_____"
],
[
"# Module 5 Material\n\n* Part 5.1: Part 5.1: Introduction to Regularization: Ridge and Lasso [[Video]](https://www.youtube.com/watch?v=jfgRtCYjoBs&list=PLjy4p-07OYzulelvJ5KVaT2pDlxivl_BN) [[Notebook]](t81_558_class_05_1_reg_ridge_lasso.ipynb)\n* Part 5.2: Using K-Fold Cross Validation with Keras [[Video]](https://www.youtube.com/watch?v=maiQf8ray_s&list=PLjy4p-07OYzulelvJ5KVaT2pDlxivl_BN) [[Notebook]](t81_558_class_05_2_kfold.ipynb)\n* **Part 5.3: Using L1 and L2 Regularization with Keras to Decrease Overfitting** [[Video]](https://www.youtube.com/watch?v=JEWzWv1fBFQ&list=PLjy4p-07OYzulelvJ5KVaT2pDlxivl_BN) [[Notebook]](t81_558_class_05_3_keras_l1_l2.ipynb)\n* Part 5.4: Drop Out for Keras to Decrease Overfitting [[Video]](https://www.youtube.com/watch?v=bRyOi0L6Rs8&list=PLjy4p-07OYzulelvJ5KVaT2pDlxivl_BN) [[Notebook]](t81_558_class_05_4_dropout.ipynb)\n* Part 5.5: Benchmarking Keras Deep Learning Regularization Techniques [[Video]](https://www.youtube.com/watch?v=1NLBwPumUAs&list=PLjy4p-07OYzulelvJ5KVaT2pDlxivl_BN) [[Notebook]](t81_558_class_05_5_bootstrap.ipynb)\n",
"_____no_output_____"
],
[
"# Google CoLab Instructions\n\nThe following code ensures that Google CoLab is running the correct version of TensorFlow.",
"_____no_output_____"
]
],
[
[
"try:\n %tensorflow_version 2.x\n COLAB = True\n print(\"Note: using Google CoLab\")\nexcept:\n print(\"Note: not using Google CoLab\")\n COLAB = False",
"Note: not using Google CoLab\n"
]
],
[
[
"# Part 5.3: Using L1 and L2 Regularization with Keras to Decrease Overfitting",
"_____no_output_____"
],
[
"L1 and L2 regularization are two common regularization techniques that can reduce the effects of overfitting (Ng, 2004). Both of these algorithms can either work with an objective function or as a part of the backpropagation algorithm. In both cases the regularization algorithm is attached to the training algorithm by adding an additional objective. \n\nBoth of these algorithms work by adding a weight penalty to the neural network training. This penalty encourages the neural network to keep the weights to small values. Both L1 and L2 calculate this penalty differently. For gradient-descent-based algorithms, such as backpropagation, you can add this penalty calculation to the calculated gradients. For objective-function-based training, such as simulated annealing, the penalty is negatively combined with the objective score.\n\nBoth L1 and L2 work differently in the way that they penalize the size of a weight. L2 will force the weights into a pattern similar to a Gaussian distribution; the L1 will force the weights into a pattern similar to a Laplace distribution, as demonstrated the following:\n\n\n\nAs you can see, L1 algorithm is more tolerant of weights further from 0, whereas the L2 algorithm is less tolerant. We will highlight other important differences between L1 and L2 in the following sections. You also need to note that both L1 and L2 count their penalties based only on weights; they do not count penalties on bias values.\n\nTensor flow allows [l1/l2 to be directly added to your network](http://tensorlayer.readthedocs.io/en/stable/modules/cost.html).",
"_____no_output_____"
]
],
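To make the penalties described above concrete, here is a minimal sketch (not from the original course notebook) that computes the L1 and L2 terms for a small, made-up weight vector with NumPy; the weight values and the strength `alpha` are illustrative only:

```python
import numpy as np

w = np.array([-0.5, 0.1, 0.0, 0.3, -0.2])  # a small, made-up weight vector
alpha = 0.01                               # hypothetical regularization strength

# L1 penalty: alpha * sum of absolute weights (pushes weights toward exactly 0)
l1_penalty = alpha * np.sum(np.abs(w))

# L2 penalty: alpha * sum of squared weights (shrinks large weights hardest)
l2_penalty = alpha * np.sum(w ** 2)

print(f"L1 penalty: {l1_penalty:.4f}")  # 0.0110
print(f"L2 penalty: {l2_penalty:.4f}")  # 0.0039
```

During training the chosen penalty is added to the loss, which is how the weights end up being pulled toward the distributions described above.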
[
[
"import pandas as pd\nfrom scipy.stats import zscore\n\n# Read the data set\ndf = pd.read_csv(\n \"https://data.heatonresearch.com/data/t81-558/jh-simple-dataset.csv\",\n na_values=['NA','?'])\n\n# Generate dummies for job\ndf = pd.concat([df,pd.get_dummies(df['job'],prefix=\"job\")],axis=1)\ndf.drop('job', axis=1, inplace=True)\n\n# Generate dummies for area\ndf = pd.concat([df,pd.get_dummies(df['area'],prefix=\"area\")],axis=1)\ndf.drop('area', axis=1, inplace=True)\n\n# Missing values for income\nmed = df['income'].median()\ndf['income'] = df['income'].fillna(med)\n\n# Standardize ranges\ndf['income'] = zscore(df['income'])\ndf['aspect'] = zscore(df['aspect'])\ndf['save_rate'] = zscore(df['save_rate'])\ndf['age'] = zscore(df['age'])\ndf['subscriptions'] = zscore(df['subscriptions'])\n\n# Convert to numpy - Classification\nx_columns = df.columns.drop('product').drop('id')\nx = df[x_columns].values\ndummies = pd.get_dummies(df['product']) # Classification\nproducts = dummies.columns\ny = dummies.values",
"_____no_output_____"
],
[
"########################################\n# Keras with L1/L2 for Regression\n########################################\n\nimport pandas as pd\nimport os\nimport numpy as np\nfrom sklearn import metrics\nfrom sklearn.model_selection import KFold\nfrom tensorflow.keras.models import Sequential\nfrom tensorflow.keras.layers import Dense, Activation\nfrom tensorflow.keras import regularizers\n\n# Cross-validate\nkf = KFold(5, shuffle=True, random_state=42)\n \noos_y = []\noos_pred = []\nfold = 0\n\nfor train, test in kf.split(x):\n fold+=1\n print(f\"Fold #{fold}\")\n \n x_train = x[train]\n y_train = y[train]\n x_test = x[test]\n y_test = y[test]\n \n #kernel_regularizer=regularizers.l2(0.01),\n \n model = Sequential()\n model.add(Dense(50, input_dim=x.shape[1], \n activation='relu',\n activity_regularizer=regularizers.l1(1e-4))) # Hidden 1\n model.add(Dense(25, activation='relu', \n activity_regularizer=regularizers.l1(1e-4))) # Hidden 2\n model.add(Dense(y.shape[1],activation='softmax')) # Output\n model.compile(loss='categorical_crossentropy', optimizer='adam')\n\n model.fit(x_train,y_train,validation_data=(x_test,y_test),verbose=0,epochs=500)\n \n pred = model.predict(x_test)\n \n oos_y.append(y_test)\n pred = np.argmax(pred,axis=1) # raw probabilities to chosen class (highest probability)\n oos_pred.append(pred) \n\n # Measure this fold's accuracy\n y_compare = np.argmax(y_test,axis=1) # For accuracy calculation\n score = metrics.accuracy_score(y_compare, pred)\n print(f\"Fold score (accuracy): {score}\")\n\n\n# Build the oos prediction list and calculate the error.\noos_y = np.concatenate(oos_y)\noos_pred = np.concatenate(oos_pred)\noos_y_compare = np.argmax(oos_y,axis=1) # For accuracy calculation\n\nscore = metrics.accuracy_score(oos_y_compare, oos_pred)\nprint(f\"Final score (accuracy): {score}\") \n \n# Write the cross-validated prediction\noos_y = pd.DataFrame(oos_y)\noos_pred = pd.DataFrame(oos_pred)\noosDF = pd.concat( [df, oos_y, oos_pred],axis=1 )\n#oosDF.to_csv(filename_write,index=False)\n\n",
"Fold #1\nFold score (accuracy): 0.64\nFold #2\nFold score (accuracy): 0.6775\nFold #3\nFold score (accuracy): 0.6825\nFold #4\nFold score (accuracy): 0.6675\nFold #5\nFold score (accuracy): 0.645\nFinal score (accuracy): 0.6625\n"
]
]
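One detail worth noting about the cell above: it uses `activity_regularizer`, which penalizes the layer outputs, while the text describes penalizing the weights. A weight penalty of the kind discussed in the text would use `kernel_regularizer` instead. A minimal sketch, reusing the `x` and `y` arrays prepared earlier in the notebook (the 0.01 strength is an arbitrary example value):

```python
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras import regularizers

# same architecture as above, but penalizing the weights (kernel) rather than the activations
model = Sequential()
model.add(Dense(50, input_dim=x.shape[1], activation='relu',
                kernel_regularizer=regularizers.l2(0.01)))  # L2 weight penalty, hidden 1
model.add(Dense(25, activation='relu',
                kernel_regularizer=regularizers.l2(0.01)))  # L2 weight penalty, hidden 2
model.add(Dense(y.shape[1], activation='softmax'))          # output layer
model.compile(loss='categorical_crossentropy', optimizer='adam')
```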
] |
[
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
]
] |
4a63aa5d39a6d30bcbe85c23e05a7e5084ed9877
| 6,854 |
ipynb
|
Jupyter Notebook
|
15. For Loop in R with Examples for List and Matrix.ipynb
|
reddyprasade/Data-Science-With-R
|
983a1eb2572a376189823b0450744bce203c0abb
|
[
"MIT"
] | 1 |
2020-05-12T02:11:34.000Z
|
2020-05-12T02:11:34.000Z
|
15. For Loop in R with Examples for List and Matrix.ipynb
|
reddyprasade/Data-Science-With-R
|
983a1eb2572a376189823b0450744bce203c0abb
|
[
"MIT"
] | null | null | null |
15. For Loop in R with Examples for List and Matrix.ipynb
|
reddyprasade/Data-Science-With-R
|
983a1eb2572a376189823b0450744bce203c0abb
|
[
"MIT"
] | 4 |
2019-10-25T09:23:15.000Z
|
2021-09-02T03:22:14.000Z
| 24.833333 | 385 | 0.478699 |
[
[
[
"A for loop is very valuable when we need to iterate over a list of elements or a range of numbers. Loop can be used to iterate over a list, data frame, vector, matrix or any other object. The braces and square bracket a\n\n\n#### For Loop Syntax and Examples",
"_____no_output_____"
]
],
[
[
"For (i in vector) {\n Exp\t\n}",
"_____no_output_____"
]
],
[
[
"R will loop over all the variables in vector and do the computation written inside the exp.\n\n",
"_____no_output_____"
],
[
"##### Example 1: \nWe iterate over all the elements of a vector and print the current value.",
"_____no_output_____"
]
],
[
[
"# Create fruit vector\nfruit <- c('Apple', 'Orange', 'Passion fruit', 'Banana')\n# Create the for statement\nfor ( i in fruit){ \n print(i)\n}",
"[1] \"Apple\"\n[1] \"Orange\"\n[1] \"Passion fruit\"\n[1] \"Banana\"\n"
],
[
"seq(1, 4, by=1)",
"_____no_output_____"
],
[
"# Create an empty list\nlist <- c()\n# Create a for statement to populate the list\nfor (i in seq(1, 4, by=1)) {\n list[[i]] <- i*i\n}\nprint(list)",
"[1] 1 4 9 16\n"
],
[
"# List of Element In a vector\nl <-c(5,2,4,2,8,9,10,25,30,23)\n\n# it is loop\nfor (i in l){\n # Condition Statement\n if (i%%2==0){\n print(\"Given Number is Even:\")\n }else{\n print(\"Given Number is Odd\")\n }\n}",
"[1] \"Given Number is Even:\"\n[1] \"Given Number is Even:\"\n[1] \"Given Number is Even:\"\n[1] \"Given Number is Even:\"\n[1] \"Given Number is Even:\"\n[1] \"Given Number is Even:\"\n"
]
],
[
[
"The for loop is very valuable for machine learning tasks. After we have trained a model, we need to regularize the model to avoid over-fitting. Regularization is a very tedious task because we need to find the value that minimizes the loss function. To help us detect those values, we can make use of a for loop to iterate over a range of values and define the best candidate.\n\n### For Loop over a list\n\n Looping over a list is just as easy and convenient as looping over a vector. Let's see an example\n\n",
"_____no_output_____"
]
],
[
[
"# Create a list with three vectors\nfruit <- list(Basket = c('Apple', 'Orange', 'Passion fruit', 'Banana'), \nMoney = c(10, 12, 15,10), purchase = FALSE)\nfor (p in fruit) \n{ \n print(p)\n}",
"[1] \"Apple\" \"Orange\" \"Passion fruit\" \"Banana\" \n[1] 10 12 15 10\n[1] FALSE\n"
]
],
[
[
"### For Loop over a matrix\n\nA matrix has 2-dimension, rows and columns. To iterate over a matrix, we have to define two for loop, namely one for the rows and another for the column.",
"_____no_output_____"
]
],
[
[
"# Create a matrix\nmat <- matrix(data = seq(1, 12, by=1), nrow = 6, ncol =2)\nprint(mat)\n# Create the loop with r and c to iterate over the matrix\nfor (r in 1:nrow(mat)) \n for (c in 1:ncol(mat)) \n print(paste(\"Row\", r, \"and column\",c, \"have values of\", mat[r,c])) ",
" [,1] [,2]\n[1,] 1 7\n[2,] 2 8\n[3,] 3 9\n[4,] 4 10\n[5,] 5 11\n[6,] 6 12\n[1] \"Row 1 and column 1 have values of 1\"\n[1] \"Row 1 and column 2 have values of 7\"\n[1] \"Row 2 and column 1 have values of 2\"\n[1] \"Row 2 and column 2 have values of 8\"\n[1] \"Row 3 and column 1 have values of 3\"\n[1] \"Row 3 and column 2 have values of 9\"\n[1] \"Row 4 and column 1 have values of 4\"\n[1] \"Row 4 and column 2 have values of 10\"\n[1] \"Row 5 and column 1 have values of 5\"\n[1] \"Row 5 and column 2 have values of 11\"\n[1] \"Row 6 and column 1 have values of 6\"\n[1] \"Row 6 and column 2 have values of 12\"\n"
]
]
] |
[
"markdown",
"raw",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"raw"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a63b1ced1f49d5cf67d73e5729669be4416be53
| 232,075 |
ipynb
|
Jupyter Notebook
|
_Saving a web page to scrape later.ipynb
|
cjwinchester/nicar-2019-scraping
|
0d95f3432a4f238e5ab451a05968c4f557224f30
|
[
"MIT"
] | 6 |
2019-03-09T22:27:39.000Z
|
2019-03-21T16:51:54.000Z
|
_Saving a web page to scrape later.ipynb
|
cjwinchester/nicar-2019-scraping
|
0d95f3432a4f238e5ab451a05968c4f557224f30
|
[
"MIT"
] | 4 |
2020-03-24T17:04:16.000Z
|
2021-02-02T22:11:23.000Z
|
_Saving a web page to scrape later.ipynb
|
cjwinchester/nicar-2019-scraping
|
0d95f3432a4f238e5ab451a05968c4f557224f30
|
[
"MIT"
] | 2 |
2019-03-10T22:28:29.000Z
|
2020-01-07T04:17:35.000Z
| 1,234.441489 | 117,590 | 0.542941 |
[
[
[
"# Saving a web page to scrape later\n\nFor many scraping jobs, it makes sense to first save a copy of the web page (or pages) that you want to scrape and then operate on the local files you've saved. This is a good practice for a couple of reasons: You won't be bombarding your target server with requests every time you fiddle with your script and rerun it, and you've got a copy saved in case the page (or pages) disappear.\n\nHere's one way to accomplish that. (If you haven't run through [the notebook on using `requests` to fetch web pages](02.%20Fetching%20HTML%20with%20requests.ipynb), do that first.)\n\nWe'll need the `requests` and `bs4` libraries, so let's start by importing them:",
"_____no_output_____"
]
],
[
[
"import requests\nimport bs4",
"_____no_output_____"
]
],
[
[
"## Fetch the page and write to file\n\nLet's grab the Texas death row page: `'https://www.tdcj.texas.gov/death_row/dr_offenders_on_dr.html'`",
"_____no_output_____"
]
],
[
[
"dr_page = requests.get('https://www.tdcj.texas.gov/death_row/dr_offenders_on_dr.html')",
"_____no_output_____"
],
[
"# take a peek at the HTML\ndr_page.text",
"_____no_output_____"
]
],
[
[
"Now, instead of continuing on with our scraping journey, we'll use some built-in Python tools to write this to file:",
"_____no_output_____"
]
],
[
[
"# define a name for the file we're saving to\nHTML_FILE_NAME = 'death-row-page.html'",
"_____no_output_____"
],
[
"# open that file in write mode and write the page's HTML into it\nwith open(HTML_FILE_NAME, 'w') as outfile:\n outfile.write(dr_page.text)",
"_____no_output_____"
]
],
[
[
"The `with` block is just a handy way to deal with opening and closing files -- note that everything under the `with` line is indented.\n\nThe `open()` function is used to open files for reading or writing. The first _argument_ that you hand this function is the name of the file you're going to be working on -- we defined it above and attached it to the `HTML_FILE_NAME` variable, which is totally arbitrary. (We could have called it `HTML_BANANAGRAM` if we wanted to.)\n\nThe `'w'` means that we're opening the file in \"write\" mode. We're also tagging the opened file with a variable name using the `as` operator -- `outfile` is an arbitrary variable name that I came up with.\n\nBut then we'll use that variable name to do things to the file we've opened. In this case, we want to use the file object's `write()` method to write some content to the file.\n\nWhat content? The HTML of the page we grabbed, which is accessible through the `.text` attribute.\n\nIn human words, this block of code is saying: \"Open a file called `death-row-page.html` and write the HTML of thata death row page you grabbed earlier into it.\"",
"_____no_output_____"
],
[
"## Reading the HTML from a saved web page\n\nAt some point after you've saved your page to file, eventually you'll want to scrape it. To read the HTML into a variable, we'll use a `with` block again, but this time we'll specify \"read mode\" (`'r'`) and use the `read()` method instead of the `write()` method:",
"_____no_output_____"
]
],
[
[
"with open(HTML_FILE_NAME, 'r') as infile:\n html = infile.read()",
"_____no_output_____"
],
[
"html",
"_____no_output_____"
]
],
[
[
"Now it's just a matter of turning that HTML into soup -- [see this notebook for more details](03.%20Parsing%20HTML%20with%20BeautifulSoup.ipynb) -- and parsing the results.",
"_____no_output_____"
]
],
[
[
"soup = bs4.BeautifulSoup(html, 'html.parser')",
"_____no_output_____"
]
]
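From here, what you extract depends entirely on the page's markup. As a minimal, hedged sketch — it assumes the saved page contains at least one HTML `<table>` of rows, which you should confirm by inspecting the actual HTML — you might pull out the rows like this:

```python
# find the first table on the page and walk its rows
# (the exact table and attributes to target depend on the page's real markup)
table = soup.find('table')

if table is not None:
    rows = table.find_all('tr')
    print(f'Found {len(rows)} rows')

    # print the cell text of the first few data rows, skipping the header row
    for row in rows[1:5]:
        cells = [cell.get_text(strip=True) for cell in row.find_all('td')]
        print(cells)
```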
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a63b6ac77a560349b80fb41bfe8088461f91936
| 97,108 |
ipynb
|
Jupyter Notebook
|
Note_books/Explore_Models/model1_v1_ridge_train_on_1st_half_only.ipynb
|
joeyamosjohns/final_project_nhl_prediction_first_draft
|
8bffe1c82c76ec4aa8482d38d9eb5efad1644496
|
[
"MIT"
] | null | null | null |
Note_books/Explore_Models/model1_v1_ridge_train_on_1st_half_only.ipynb
|
joeyamosjohns/final_project_nhl_prediction_first_draft
|
8bffe1c82c76ec4aa8482d38d9eb5efad1644496
|
[
"MIT"
] | null | null | null |
Note_books/Explore_Models/model1_v1_ridge_train_on_1st_half_only.ipynb
|
joeyamosjohns/final_project_nhl_prediction_first_draft
|
8bffe1c82c76ec4aa8482d38d9eb5efad1644496
|
[
"MIT"
] | null | null | null | 35.965926 | 14,096 | 0.43636 |
[
[
[
"import pandas as pd\n\nnhl_games= pd.read_csv(\"/Users/joejohns/data_bootcamp/GitHub/final_project_nhl_prediction/Data/Kaggle_Data_Ellis/game.csv\")",
"_____no_output_____"
],
[
"nhl_games.columns",
"_____no_output_____"
],
[
"nhl_20162017 = nhl_games.loc[(nhl_games['season'] == 20162017)&(nhl_games['type'] == 'R') , ['game_id', 'season', 'type', 'date_time_GMT', 'away_team_id',\n 'home_team_id', 'away_goals', 'home_goals', 'outcome']]",
"_____no_output_____"
],
[
"\n\n\nnhl_20162017['date_time_GMT'] = pd.to_datetime(nhl_20162017['date_time_GMT'])",
"_____no_output_____"
],
[
"nhl_20162017.loc[0,'date_time_GMT']\n\nnhl_20162017.sort_values(by='date_time_GMT', inplace = True) ",
"_____no_output_____"
],
[
"\n#nhl_team.loc[(nhl_mp[\"settled_in\"] == 'tbc'), :]\nnhl_20162017.head(20)\n",
"_____no_output_____"
],
[
"team_info = pd.read_csv(\"/Users/joejohns/data_bootcamp/GitHub/final_project_nhl_prediction/Data/Kaggle_Data_Ellis/team_info.csv\" )",
"_____no_output_____"
],
[
"team_info.dtypes",
"_____no_output_____"
]
],
[
[
"team_info.head()",
"_____no_output_____"
]
],
[
[
"team_info.head()",
"_____no_output_____"
],
[
"team_info.loc[team_info['team_id'] == 1, 'abbreviation'][0]",
"_____no_output_____"
],
[
"def map_names(team_id):\n index = team_info.loc[team_info['team_id'] == team_id, 'abbreviation'].index[0]\n return team_info.loc[team_info['team_id'] == team_id, 'abbreviation'][index]\n",
"_____no_output_____"
],
[
"map_names(10)",
"_____no_output_____"
],
[
"team_info.loc[:,['team_id', 'abbreviation']]",
"_____no_output_____"
],
[
"nhl_20162017['away_team_id'] = nhl_20162017['away_team_id'].map(map_names) ",
"_____no_output_____"
],
[
"nhl_20162017['home_team_id'] = nhl_20162017['home_team_id'].map(map_names)",
"_____no_output_____"
],
[
"nhl_20162017",
"_____no_output_____"
],
[
"nhl_20162017 = nhl_20162017.reset_index(drop = True)",
"_____no_output_____"
],
[
"df = nhl_20162017.copy()\ndf",
"_____no_output_____"
],
[
"import pandas as pd\nimport numpy as np\nfrom sklearn.metrics import accuracy_score\nfrom sklearn.linear_model import Ridge",
"_____no_output_____"
],
[
"df['goal_difference'] = df['home_goals'] - df['away_goals']",
"_____no_output_____"
],
[
"# create new variables to show home team win or loss result\ndf['home_win'] = np.where(df['goal_difference'] > 0, 1, 0)\ndf['home_loss'] = np.where(df['goal_difference'] < 0, 1, 0)",
"_____no_output_____"
],
[
"df.head(6)",
"_____no_output_____"
],
[
"df_visitor = pd.get_dummies(df['away_team_id'], dtype=np.int64)\ndf_home = pd.get_dummies(df['home_team_id'], dtype=np.int64)",
"_____no_output_____"
],
[
"df_visitor.head(3)",
"_____no_output_____"
],
[
"df_model = df_home.sub(df_visitor) \ndf_model['goal_difference'] = df['goal_difference']",
"_____no_output_____"
],
[
"df_model",
"_____no_output_____"
],
[
"df_train = df_model.iloc[:600, :].copy()\ndf_test = df_model.iloc[600:, :].copy()",
"_____no_output_____"
],
[
"###test \nlr.fit \n",
"_____no_output_____"
],
[
"%time \n\nlr = Ridge(alpha=0.001) \nX = df_train.drop(['goal_difference'], axis=1)\ny = df_train['goal_difference']\n\nlr.fit(X, y)",
"CPU times: user 4 µs, sys: 1e+03 ns, total: 5 µs\nWall time: 8.11 µs\n"
],
[
"df_ratings = pd.DataFrame(data={'team': X.columns, 'rating': lr.coef_})\ndf_ratings",
"_____no_output_____"
],
[
"##test this on rest of this season WITHOUT updating the rankings as we go!",
"_____no_output_____"
],
[
"X_test = df_test.drop(['goal_difference'], axis=1)\ny_test = df_test['goal_difference']\n\n\ny_pred = lr.predict(X_test)",
"_____no_output_____"
],
[
"df_test['goal_difference']\n",
"_____no_output_____"
],
[
"def make_win(y):\n if y >0:\n return 1\n if y< 0:\n return 0",
"_____no_output_____"
],
[
"\n\ny_test_win = pd.Series(y_test).map(make_win)",
"_____no_output_____"
],
[
"y_pred_win = pd.Series(y_pred).map(make_win)",
"_____no_output_____"
],
[
"y_pred_win.value_counts()",
"_____no_output_____"
],
[
"y_test_win.value_counts()",
"_____no_output_____"
],
[
"from sklearn.metrics import accuracy_score, precision_score, recall_score, roc_auc_score, confusion_matrix\nfrom sklearn.metrics import mean_absolute_error, mean_squared_error, r2_score, f1_score\nimport matplotlib.pyplot as plt\nimport seaborn as sns\n",
"_____no_output_____"
],
[
"\ndef evaluate_binary_classification(model_name, y_test, y_pred, y_proba=None):\n accuracy = accuracy_score(y_test, y_pred)\n precision = precision_score(y_test, y_pred)\n recall = recall_score(y_test, y_pred)\n f1 = f1_score(y_test, y_pred)\n #try:\n if y_proba != None:\n rocauc_score = roc_auc_score(y_test, y_proba)\n else:\n rocauc_score = \"no roc\"\n #except: \n # pass \n cm = confusion_matrix(y_test, y_pred)\n sns.heatmap(cm, annot=True)\n plt.tight_layout()\n plt.title(f'{model_name}', y=1.1)\n plt.ylabel('Actual label')\n plt.xlabel('Predicted label')\n plt.show()\n print(\"accuracy: \", accuracy)\n print(\"precision: \", precision)\n print(\"recall: \", recall)\n print(\"f1 score: \", f1)\n print(\"rocauc: \", rocauc_score)\n print(cm)\n #return accuracy, precision, recall, f1, rocauc_score",
"_____no_output_____"
],
[
"evaluate_binary_classification('ridge_regression', y_test_win, y_pred_win)",
"_____no_output_____"
],
[
"from sklearn.metrics import mean_absolute_error, mean_squared_error, r2_score, f1_score\n\ndef evaluate_regression(y_test, y_pred):\n mae = mean_absolute_error(y_test, y_pred)\n mse = mean_squared_error(y_test, y_pred)\n r2 = r2_score(y_test, y_pred)\n print(\"mae\", mae)\n print(\"mse\", mse)\n print('r2', r2)",
"_____no_output_____"
],
[
"evaluate_regression(y_test, y_pred)",
"mae 1.9137773622272165\nmse 5.305807171452987\nr2 0.030844151607030246\n"
],
[
"##off by 2 goals on avg?? that's really bad",
"_____no_output_____"
],
[
"len(y_test_win)",
"_____no_output_____"
],
[
"pred_res= pd.DataFrame({ 'pred_win': list(y_pred_win), 'actual_win': list(y_test_win), 'pred_GD': y_pred, 'actual_GD': y_test })",
"_____no_output_____"
],
[
"pred_res.iloc[:20,:]",
"_____no_output_____"
]
]
] |
[
"code",
"markdown",
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a63c7ea487d96b77d1029135e0a4747344d164f
| 167,074 |
ipynb
|
Jupyter Notebook
|
home-work2.ipynb
|
vladdez/RD_forscasting
|
2b0445bc1ff65bda71257cadaf1aa093fc149995
|
[
"MIT"
] | null | null | null |
home-work2.ipynb
|
vladdez/RD_forscasting
|
2b0445bc1ff65bda71257cadaf1aa093fc149995
|
[
"MIT"
] | null | null | null |
home-work2.ipynb
|
vladdez/RD_forscasting
|
2b0445bc1ff65bda71257cadaf1aa093fc149995
|
[
"MIT"
] | null | null | null | 64.507336 | 31,940 | 0.66042 |
[
[
[
"Импортируйте нужные библиотеки\n===\nМы уже много работали с pandas, numpy и различными инструментами визуализации. Поэтому предлагаем вам самим импортировать те библиотеки, которые вам нужны для выполнения домашнего задания \n\nВременной ряд \n=== \nВ папке datasets у нас находится множество различных временных рядов, которые состоят из двух колонок - времени и значения. Можно взять любые данные на выбор, кроме того, который мы разобрали на лекции (shampoo_sales). \n\nЗадание \n==\n\n* Сделайте визуализацию ряда\n* Разбейте его на компоненты\n* Проверьте автокорреляцию\n* Примените одну или несколько трансформаций, разобранных на лекции",
"_____no_output_____"
],
[
"Загрузка библиотек",
"_____no_output_____"
]
],
[
[
"import pandas as pd\nimport numpy as np\nfrom scipy.stats import boxcox\nfrom plotly.offline import init_notebook_mode, iplot\nfrom plotly.graph_objects import *\nfrom statsmodels.tsa.seasonal import seasonal_decompose",
"_____no_output_____"
],
[
"df = pd.read_csv('../datatest/airline-passengers.csv')\ndf['Month'] = pd.to_datetime(df['Month'])\n#df.set_index('Month', inplace=True)",
"_____no_output_____"
],
[
"df",
"_____no_output_____"
]
],
[
[
"Визуализация ряда",
"_____no_output_____"
]
],
[
[
"iplot(Figure(data=Scatter(x=df[\"Month\"], y=df[\"Passengers\"])))",
"_____no_output_____"
],
[
"s = pd.Series(index=df['Month'],\n data=df['Passengers'].values)",
"_____no_output_____"
]
],
[
[
"Декомпозиция ряда",
"_____no_output_____"
]
],
[
[
"results = seasonal_decompose(s, 'additive')",
"_____no_output_____"
],
[
"results.trend.plot()",
"_____no_output_____"
],
[
"results.seasonal.plot()",
"_____no_output_____"
],
[
"results.resid.plot()",
"_____no_output_____"
]
],
[
[
"Проверка автокорреляции",
"_____no_output_____"
]
],
[
[
"df['Passengers_lag1'] = df['Passengers'].shift(1)\ndf.corr()['Passengers_lag1']",
"_____no_output_____"
]
],
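The lag-1 correlation above is only a point check; a fuller picture of the autocorrelation structure comes from plotting the autocorrelation function over many lags. A minimal sketch, assuming `statsmodels` (already used above for the decomposition) and `matplotlib` are available:

```python
import matplotlib.pyplot as plt
from statsmodels.graphics.tsaplots import plot_acf

# autocorrelation for the first 40 lags; a strong spike around lag 12
# would be consistent with the yearly seasonality seen in the decomposition
plot_acf(df['Passengers'], lags=40)
plt.show()
```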
[
[
"Применение трансформации",
"_____no_output_____"
]
],
[
[
"df['boxcox']=boxcox(df['Passengers'],lmbda=0)\niplot(Figure(data=Scatter(x=df['Month'],y=df['boxcox'])))",
"_____no_output_____"
]
]
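Besides the Box-Cox transform above, differencing is another commonly covered transformation: a first difference removes trend, and a lag-12 difference targets the yearly seasonality of this series. A minimal sketch reusing the `df`, `iplot`, `Figure` and `Scatter` objects already imported in this notebook:

```python
# difference the Box-Cox-transformed series
df['diff1'] = df['boxcox'].diff(1)    # first difference: removes trend
df['diff12'] = df['boxcox'].diff(12)  # seasonal difference: removes the yearly pattern

iplot(Figure(data=Scatter(x=df['Month'], y=df['diff1'])))
```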
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a63c88fe652466be4d9bf028bb6ed0dec69436e
| 9,934 |
ipynb
|
Jupyter Notebook
|
Tutorial/2-Research-Data-Visualisations/2.2-Geopandas/shapefiles-with-geopandas.ipynb
|
GongdaLu/Python-for-Atmospheric-Scientists
|
3beec3466c26768fc07b309df0584f67911629a3
|
[
"MIT"
] | null | null | null |
Tutorial/2-Research-Data-Visualisations/2.2-Geopandas/shapefiles-with-geopandas.ipynb
|
GongdaLu/Python-for-Atmospheric-Scientists
|
3beec3466c26768fc07b309df0584f67911629a3
|
[
"MIT"
] | null | null | null |
Tutorial/2-Research-Data-Visualisations/2.2-Geopandas/shapefiles-with-geopandas.ipynb
|
GongdaLu/Python-for-Atmospheric-Scientists
|
3beec3466c26768fc07b309df0584f67911629a3
|
[
"MIT"
] | null | null | null | 38.653696 | 162 | 0.553553 |
[
[
[
"# Handle shapefiles using Geopandas",
"_____no_output_____"
]
],
[
[
"###############################################################################################\n###############################################################################################\n# Part 1: work with shapefiles\n# I am using a \"shapefile\" which consists of at least four actual files (.shp, .shx, .dbf, .prj). This is a commonly used format.\n# The new \".rds\" format shapefiles seem to be designed only for use in R programming (For more about shapefile formats: https://gadm.org/formats.html).\n# An example shapefiles source: https://gadm.org/download_country_v3.html",
"_____no_output_____"
],
[
"###############################################################################################\n# Method 1 (Matplotlib + Cartopy)\nimport matplotlib.pyplot as plt\nimport cartopy.crs as ccrs\nfrom cartopy.io.shapereader import Reader\nfrom cartopy.feature import ShapelyFeature\n\n# set up working directory\nimport os\nos.chdir(\"move-to-your-working-directory\")\n\n# import the shapefile\nUK_shape_file = r'gadm36_GBR_3.shp'\n# get the map (geometries)\nUK_map = ShapelyFeature(Reader(UK_shape_file).geometries(),ccrs.PlateCarree(), edgecolor='black',facecolor='none')\n# initialize a plot\ntest= plt.axes(projection=ccrs.PlateCarree())\n# add the shapefile for the whole UK\ntest.add_feature(UK_map) \n# zoom in to London\ntest.set_extent([-2,2,51,52], crs=ccrs.PlateCarree()) ",
"_____no_output_____"
],
[
"###############################################################################################\n# Method 2 (Matplotlib + Cartopy + Geopandas)\nimport matplotlib.pyplot as plt\nimport cartopy.crs as ccrs\nimport geopandas as gpd\n\n# set up working directory\nimport os\nos.chdir(\"move-to-your-working-directory\")\n\n# read the UK shapefile as a \"geopandas.geodataframe.GeoDataFrame\"\nUK_shapefile = gpd.read_file(\"gadm36_GBR_3.shp\")\n# check what UK shapefile contains\nUK_shapefile\n# get shapes for London, Birmingham and Edinburgh from the UK shapefile\n# you can also go to a coarser/finer layer to select a bigger/smaller domain\nLondon = UK_shapefile[UK_shapefile['NAME_2'] == \"Greater London\"]\nBirmingham = UK_shapefile[UK_shapefile['NAME_2'] == \"Birmingham\"]\nEdinburgh = UK_shapefile[UK_shapefile['NAME_2'] == \"Edinburgh\"]\n\n# check the geometry for each city\nprint(London.geometry)\nprint(Birmingham.geometry)\nprint(Edinburgh.geometry)\n\n# create a list of your study cities and merge the shapes (geopandas.geodataframe.GeoDataFrame)\nimport pandas as pd\nstudy_cities = [London,Birmingham,Edinburgh]\nstudy_cities_shapes = gpd.GeoDataFrame(pd.concat(study_cities, ignore_index=True))\n\n# initialize a plot\ntest= plt.axes(projection=ccrs.PlateCarree())\n# add shapefiles to your chosen cities only\n# you can change \"edgecolor\", \"facecolor\" and \"linewidth\" to highlight certain areas\n# you can change the \"zorder\" to decide the layer\ntest.add_geometries(study_cities_shapes.geometry, crs=ccrs.PlateCarree(),edgecolor='black',facecolor='none',linewidth=2,zorder=0)\n\n# zoom in to your study domain\ntest.set_extent([-5,2,51,57], crs=ccrs.PlateCarree())",
"_____no_output_____"
],
[
"# Part 1 Remarks:\n# 1> I prefer Method 2 as \"geopandas\" allows more control of the shapefile\n# 2> But it is almost impossible to install geopandas following the instructions on its homepage: https://geopandas.org/install.html\n# I managed to install it on my windows PC following this video: https://www.youtube.com/watch?v=LNPETGKAe0c\n# 3> Method 1 is easy to use on all platforms, althought there is less control of the shapefile",
"_____no_output_____"
],
[
"###############################################################################################\n# Part 2: some techniques with \"polygons\"\n# sometimes, we may want to know which data sites are within our outside a certain area\n# or we may want to know if two areas have any overlaps\n# use can use some tricks with \"polygons\" to achieve these",
"_____no_output_____"
],
[
"###############################################################################################\n# task 1: create a polygon using shapefiles (using Chinese cities as an example)\n\n# read shapefiles for cities in mainland China\nos.chdir(\"/rds/projects/2018/maraisea-glu-01/Study/Research_Data/BTH/domain/gadm36_CHN_shp\")\nChina_cities = gpd.read_file(\"gadm36_CHN_2.shp\")\n\n# check the city list\nprint(China_cities['NAME_2'])\n\n# get the list of your target cities\nos.chdir(\"/rds/projects/2018/maraisea-glu-01/Study/Research_Data/BTH/domain/\")\nBTH_cities = pd.read_csv(\"2+26_cities.csv\")\nBTH_cities = list(BTH_cities['City'])\n\n# extract the shape (multi-polygon) for each city\nBTH_shapes = [China_cities[China_cities['NAME_2'] == city_name] for city_name in BTH_cities]\nprint(\"Number of city shapefiles:\",len(BTH_shapes))\n\n# combine shapefiles from all cities into a single shape\nBTH_shapes = gpd.GeoDataFrame(pd.concat(BTH_shapes, ignore_index=True))\n\n# check the shape for a certain city\nBTH_shapes['geometry'][0]\n\n# plot the combined shape for the target cities\nfrom shapely.ops import cascaded_union\nBTH_polygons = BTH_shapes['geometry']\nBTH_boundary = gpd.GeoSeries(cascaded_union(BTH_polygons))\nBTH_boundary.plot(color = 'red')\nplt.show()",
"_____no_output_____"
],
[
"###############################################################################################\n# task 2: derive the polygon for a grid centre with given resolutions (use GEOS-Chem model grids as the example)\ndef create_polygon(lon,lat,lon_res,lat_res):\n '''Input lon,lat,resolution of lon,lat in order. Then create the polygon for the target grid'''\n from shapely import geometry\n from shapely.ops import cascaded_union\n p1 = geometry.Point(lon-lon_res/2,lat-lat_res/2)\n p2 = geometry.Point(lon+lon_res/2,lat-lat_res/2)\n p3 = geometry.Point(lon+lon_res/2,lat+lat_res/2)\n p4 = geometry.Point(lon-lon_res/2,lat+lat_res/2)\n pointList = [p1, p2, p3, p4, p1]\n output_polygon = geometry.Polygon([[p.x, p.y] for p in pointList])\n output_polygon = gpd.GeoSeries(cascaded_union(poly))\n return output_polygon\n \n# based on this, you can also create your function to return a polygon using coordiantes of of data points. ",
"_____no_output_____"
],
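Following the suggestion in the comment above, here is one minimal way such a function might look — it is a sketch, not part of the original notebook, and it uses the convex hull of the points, which is only one of several reasonable choices:

```python
from shapely.geometry import MultiPoint

def polygon_from_points(lons, lats):
    '''Return the convex-hull polygon enclosing a set of data points,
    given equal-length sequences of longitudes and latitudes.'''
    points = MultiPoint(list(zip(lons, lats)))
    return points.convex_hull

# example with made-up coordinates
hull = polygon_from_points([116.4, 117.2, 114.5], [39.9, 39.1, 38.0])
print(hull)
```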
[
"###############################################################################################\n# task 3: test if a polygon contains a certain point\nfrom shapely.geometry import Point\nfrom shapely.geometry.polygon import Polygon\n\npoint = Point(0.5, 0.5)\npolygon = Polygon([(0, 0), (0, 1), (1, 1), (1, 0)])\nprint(polygon.contains(point))",
"_____no_output_____"
],
[
"###############################################################################################\n# task 4: test if two polygons have any overlaps\nprint(polyon_A.intersects(polyon_B))\n\n# Part 2 Remarks:\n# these can be useful as sometimes we want to know which grid contain our target data points \n# or we may want to know if which grids are within the target domain\n# or we may want to know some details of data in a certain domain\n\n\n###############################################################################################",
"_____no_output_____"
]
]
] |
[
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a63e99185704c3e493e7b6825b8a66fbe54ef7e
| 5,096 |
ipynb
|
Jupyter Notebook
|
book-R/.ipynb_checkpoints/Rbasico-checkpoint.ipynb
|
cleuton/datascience
|
afbdc5a32bb3ce981b6616305e6a7da7a690bc7b
|
[
"Apache-2.0"
] | 29 |
2017-11-02T11:05:28.000Z
|
2022-03-01T13:52:48.000Z
|
book-R/.ipynb_checkpoints/Rbasico-checkpoint.ipynb
|
AhirtonLopes/datascience
|
d1a5349ddb18a6ec3d7c9cf3c7d00550c76e91ed
|
[
"Apache-2.0"
] | 2 |
2018-03-19T21:08:48.000Z
|
2020-05-22T09:40:35.000Z
|
book-R/.ipynb_checkpoints/Rbasico-checkpoint.ipynb
|
AhirtonLopes/datascience
|
d1a5349ddb18a6ec3d7c9cf3c7d00550c76e91ed
|
[
"Apache-2.0"
] | 18 |
2018-01-03T22:06:44.000Z
|
2022-02-25T17:47:47.000Z
| 21.145228 | 181 | 0.427002 |
[
[
[
"# Variáveis\n\n\n- Variável (estatística) - atributo, mensurável ou não, sujeito à variação quantitativa ou qualitativa, no interior de um conjunto\n- Variável (matemática) - ente, em geral representado por uma letra, que pode assumir diferentes valores numéricos em uma expressão algébrica, numa fórmula ou num algoritmo\n- Variável (programação) - objeto situado na memória que representa um valor ou expressão\n",
"_____no_output_____"
]
],
[
[
"y <- 2*x + 3",
"_____no_output_____"
],
[
"x <- 3",
"_____no_output_____"
],
[
"y <- 2*x + 3\nprint(y)",
"[1] 9\n"
]
],
[
[
"## Variáveis estatísticas",
"_____no_output_____"
]
],
[
[
"df_alunos <- read.csv('pesos-alturas.csv')",
"_____no_output_____"
],
[
"head(df_alunos)",
"_____no_output_____"
],
[
"summary(df_alunos)",
"_____no_output_____"
],
[
"sd(df_alunos$Pesos)",
"_____no_output_____"
],
[
"s <- sqrt(sum((df_alunos$Pesos-mean(df_alunos$Pesos))^2/(length(df_alunos$Pesos)-1)))\nprint(s)",
"[1] 9.078084\n"
]
]
] |
[
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
]
] |
4a63eaa791cf13153e5c3c21a2f45df35cf672b6
| 45,936 |
ipynb
|
Jupyter Notebook
|
labs/numpy.ipynb
|
hannahalberti/ml-class
|
25e4dbda07c70f08d39c0d60885e4ca9f396e9d9
|
[
"Apache-2.0"
] | 11 |
2020-08-26T21:26:42.000Z
|
2022-03-13T23:02:56.000Z
|
labs/numpy.ipynb
|
hannahalberti/ml-class
|
25e4dbda07c70f08d39c0d60885e4ca9f396e9d9
|
[
"Apache-2.0"
] | null | null | null |
labs/numpy.ipynb
|
hannahalberti/ml-class
|
25e4dbda07c70f08d39c0d60885e4ca9f396e9d9
|
[
"Apache-2.0"
] | 10 |
2020-08-27T05:35:27.000Z
|
2021-11-08T18:49:39.000Z
| 22.853731 | 400 | 0.48966 |
[
[
[
"# Task 1: Getting started with Numpy\n\nLet's spend a few minutes just learning some of the fundamentals of Numpy. (pronounced as num-pie **not num-pee**) \n\n### what is numpy\nNumpy is a Python library that support large, multi-dimensional arrays and matrices. \n\nLet's look at an example. Suppose we start with a little table:\n\n| a | b | c | d | e |\n| :---: | :---: | :---: | :---: | :---: |\n| 0 | 1 | 2 | 3 | 4 |\n|10| 11| 12 | 13 | 14|\n|20| 21 | 22 | 23 | 24 |\n|30 | 31 | 32 | 33 | 34 |\n|40 |41 | 42 | 43 | 44 |\n\nand I simply want to add 10 to each cell:\n\n| a | b | c | d | e |\n| :---: | :---: | :---: | :---: | :---: |\n| 10 | 11 | 12 | 13 | 14 |\n|20| 21| 22 | 23 | 24|\n|30| 31 | 32 | 33 | 34 |\n|40 | 41 | 42 | 43 | 44 |\n|50 |51 | 52 | 53 | 54 |\n\nTo make things interesting, instead of a a 5 x5 array, let's make it 1,000x1,000 -- so 1 million cells!\n\nFirst, let's construct it in generic Python",
"_____no_output_____"
]
],
[
[
"a = [[x + y * 1000 for x in range(1000)] for y in range(1000)]\n",
"_____no_output_____"
]
],
[
[
"Instead of glossing over the first code example in the course, take your time, go back, and parse it out so you understand it. Test it out and see what it looks like. For example, how would you change the example to make a 10x10 array called `a2`? execute the code here:",
"_____no_output_____"
]
],
[
[
"# TO DO",
"_____no_output_____"
]
],
[
[
"Now let's take a look at the value of a2:",
"_____no_output_____"
]
],
[
[
"a2",
"_____no_output_____"
]
],
[
[
"Now that we understand that line of code let's go on and write a function that will add 10 to each cell in our original 1000x1000 matrix.\n",
"_____no_output_____"
]
],
[
[
"def addToArr(sizeof):\n for i in range(sizeof):\n for j in range(sizeof):\n a[i][j] = a[i][j] + 10\n",
"_____no_output_____"
]
],
[
[
"As you can see, we iterate over the array with nested for loops. \n\nLet's take a look at how much time it takes to run that function:",
"_____no_output_____"
]
],
[
[
"%time addToArr(1000)",
"CPU times: user 145 ms, sys: 0 ns, total: 145 ms\nWall time: 143 ms\n"
]
],
[
[
"My results were:\n\n CPU times: user 145 ms, sys: 0 ns, total: 145 ms\n Wall time: 143 ms\n\nSo about 1/7 of a second. \n\n### Doing in using Numpy\nNow do the same using Numpy.\n\n\nWe can construct the array using\n \n arr = np.arange(1000000).reshape((1000,1000))\n\nNot sure what that line does? Numpy has great online documentation. [Documentation for np.arange](https://numpy.org/doc/stable/reference/generated/numpy.arange.html) says it \"Return evenly spaced values within a given interval.\" Let's try it out:",
"_____no_output_____"
]
],
[
[
"import numpy as np\nnp.arange(16)",
"_____no_output_____"
]
],
[
[
"So `np.arange(10)` creates a matrix of 16 sequential integers. [The documentation for reshape](https://numpy.org/doc/1.18/reference/generated/numpy.reshape.html) says, as the name suggests, \"Gives a new shape to an array without changing its data.\" Suppose we want to reshape our 1 dimensional matrix of 16 integers to a 4x4 one. we can do:",
"_____no_output_____"
]
],
[
[
"np.arange(16).reshape((4,4))",
"_____no_output_____"
]
],
[
[
"As you can see it is pretty easy to find documentation on Numpy.\n\nBack to our example of creating a 1000x1000 matrix, we now can time how long it takes to add 10 to each cell.\n\n %time arr = arr + 10\n \nLet's put this all together:",
"_____no_output_____"
]
],
[
[
"import numpy as np\narr = np.arange(1000000).reshape((1000,1000))\n%time arr = arr + 10",
"CPU times: user 1.36 ms, sys: 600 µs, total: 1.96 ms\nWall time: 1.81 ms\n"
]
],
[
[
"My results were\n\n CPU times: user 1.26 ms, sys: 408 µs, total: 1.67 ms\n Wall time: 1.68 ms\n\nSo, depending on your computer, somewhere around 25 to 100 times faster. **That is phenomenally faster!**\n\nAnd Numpy is even fast in creating arrays:\n\n#### the generic Python way",
"_____no_output_____"
]
],
[
[
"%time a = [[x + y * 1000 for x in range(1000)] for y in range(1000)]",
"CPU times: user 92.1 ms, sys: 11.5 ms, total: 104 ms\nWall time: 102 ms\n"
]
],
[
[
"My results were\n\n CPU times: user 92.1 ms, sys: 11.5 ms, total: 104 ms\n Wall time: 102 ms\n\n#### the Numpy way",
"_____no_output_____"
]
],
[
[
"%time arr = np.arange(1000000).reshape((1000,1000))",
"CPU times: user 0 ns, sys: 1.95 ms, total: 1.95 ms\nWall time: 1.1 ms\n"
]
],
[
[
"What are your results?\n\n<h3 style=\"color:red\">Q1. Speed</h3>\n<span style=\"color:red\">Suppose I want to create an array with 10,000 by 10,000 cells. Then I want to add 1 to each cell. How much time does this take using generic Python arrays and using Numpy arrays?</span>\n\n#### in Python\n(be patient -- this may take a number of seconds)",
"_____no_output_____"
],
[
"#### in Numpy",
"_____no_output_____"
],
[
"### built in functions\nIn addition to being faster, numpy has a wide range of built in functions. So, for example, instead of you writing code to calculate the mean or sum or standard deviation of a multidimensional array you can just use numpy:",
"_____no_output_____"
]
],
[
[
"arr.mean()",
"_____no_output_____"
],
[
"arr.sum()",
"_____no_output_____"
],
[
" arr.std()",
"_____no_output_____"
]
],
[
[
"So not only is it faster, but it minimizes the code you have to write. A win, win.\n\nLet's continue with some basics.",
"_____no_output_____"
],
[
"## numpy examined \nSo Numpy is a library containing a super-fast n-dimensional array object and a load of functions that can operate on those arrays. To use numpy, we must first load the library into our code and we do that with the statement:\n",
"_____no_output_____"
]
],
[
[
" import numpy as np",
"_____no_output_____"
]
],
[
[
"Perhaps most of you are saying \"fine, fine, I know this already\", but let me catch others up to speed. This is just one of several ways we can load a library into Python. We could just say:",
"_____no_output_____"
]
],
[
[
" import numpy",
"_____no_output_____"
]
],
[
[
"and everytime we need to use one of the functions built in\nto numpy we would need to preface that function with `numpy` . So for example, we could create an array with\n",
"_____no_output_____"
]
],
[
[
"arr = numpy.array([1, 2, 3, 4, 5])",
"_____no_output_____"
]
],
[
[
"If we got tired of writing `numpy` in front of every function, instead of typing",
"_____no_output_____"
]
],
[
[
"import numpy",
"_____no_output_____"
]
],
[
[
"we could write:",
"_____no_output_____"
]
],
[
[
"from numpy import *",
"_____no_output_____"
]
],
[
[
"(where that * means 'everything' and the whole expression means import everything from the numpy library). Now we can use any numpy function without putting numpy in front of it:",
"_____no_output_____"
]
],
[
[
"arr = array([1, 2, 3, 4, 5])",
"_____no_output_____"
]
],
[
[
"This may at first seem like a good idea, but it is considered bad form by Python developers. \n\nThe solution is to use what we initially introduced:",
"_____no_output_____"
]
],
[
[
" import numpy as np",
"_____no_output_____"
]
],
[
[
"this makes `np` an alias for numpy. so now we would put *np* in front of numpy functions.",
"_____no_output_____"
]
],
[
[
" arr = np.array([1, 2, 3, 4, 5])",
"_____no_output_____"
]
],
[
[
"Of course we could use anything as an alias for numpy:",
"_____no_output_____"
]
],
[
[
"import numpy as myCoolSneakers\narr = myCoolSneakers.array([1, 2, 3, 4, 5])\n",
"_____no_output_____"
]
],
[
[
"But it is convention among data scientists, machine learning experts, and the cool kids to use np. One big benefit of this convention is that it makes the code you write more understandable to others and vice versa (I don't need to be scouring your code to find out what `myCoolSneakers.array` does)\n\n## creating arrays\n\nAn Array in Numpy is called an `ndarray` for n-dimensional array. As we will see, they share some similarities with Python lists. We have already seen how to create one:",
"_____no_output_____"
]
],
[
[
"arr = np.array([1, 2, 3, 4, 5])",
"_____no_output_____"
]
],
[
[
"and to display what `arr` equals",
"_____no_output_____"
]
],
[
[
"arr",
"_____no_output_____"
]
],
[
[
"This is a one dimensional array. The position of an element in the array is called the index. The first element of the array is at index 0, the next at index 1 and so on. We can get the item at a particular index by using the syntax:",
"_____no_output_____"
]
],
[
[
" arr[0]",
"_____no_output_____"
],
[
"arr[3]",
"_____no_output_____"
]
],
[
[
"We can create a 2 dimensional array that looks like\n\n 10 20 30\n 40 50 60\n \nby:\n",
"_____no_output_____"
]
],
[
[
" arr = np.array([[10, 20, 30], [40, 50, 60]])",
"_____no_output_____"
]
],
[
[
"and we can show the contents of that array just be using the name of the array, `arr`\n",
"_____no_output_____"
]
],
[
[
"arr",
"_____no_output_____"
]
],
[
[
"We don't need to name arrays `arr`, we can name them anything we want. ",
"_____no_output_____"
]
],
[
[
"ratings = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])",
"_____no_output_____"
],
[
"ratings",
"_____no_output_____"
]
],
[
[
"So far, we've been creating numpy arrays by using Python lists. We can make that more explicit by first creating the Python list and then using it to create the ndarray:",
"_____no_output_____"
]
],
[
[
"pythonArray = [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]\nsweet = np.array(pythonArray)\nsweet",
"_____no_output_____"
]
],
[
[
"We can also create an array of all zeros or all ones directly:",
"_____no_output_____"
]
],
[
[
"np.zeros(10)",
"_____no_output_____"
],
[
"np.ones((5, 2))",
"_____no_output_____"
]
],
[
[
"### indexing\nIndexing elements in ndarrays works pretty much the same as it does in Python. We have already seen one example, here is another example with a one dimensional array:\n",
"_____no_output_____"
]
],
[
[
"temperatures = np.array([48, 44, 37, 35, 32, 29, 33, 36, 42])\ntemperatures[0]",
"_____no_output_____"
],
[
"temperatures[3]",
"_____no_output_____"
]
],
[
[
"and a two dimensional one:",
"_____no_output_____"
]
],
[
[
"sample = np.array([[10, 20, 30], [40, 50, 60]])\nsample[0][1]",
"_____no_output_____"
]
],
[
[
"For numpy ndarrays we can also use a comma to separate the indices of multi-dimensional arrays:",
"_____no_output_____"
]
],
[
[
"sample[1,2]",
"_____no_output_____"
]
],
[
[
"And, like Python you can also get a slice of an array. First, here is the basic Python example:",
"_____no_output_____"
]
],
[
[
"a = [10, 20, 30, 40, 50, 60]\nb = a[1:4]\nb",
"_____no_output_____"
]
],
[
[
"and the similar numpy example:",
"_____no_output_____"
]
],
[
[
"aarr = np.array(a)\nbarr = aarr[1:4]\nbarr",
"_____no_output_____"
]
],
[
[
"### Something wacky to remember\nBut there is a difference between Python arrays and numpy ndarrays. If I alter the array `b` in Python the orginal `a` array is not altered:",
"_____no_output_____"
]
],
[
[
"b[1] = b[1] + 5",
"_____no_output_____"
],
[
"b",
"_____no_output_____"
],
[
"a",
"_____no_output_____"
]
],
[
[
"but if we do the same in numpy:",
"_____no_output_____"
]
],
[
[
"barr[1] = barr[1] + 5",
"_____no_output_____"
],
[
"barr",
"_____no_output_____"
],
[
"aarr",
"_____no_output_____"
]
],
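If the behaviour demonstrated above is not what you want, ask numpy for an explicit copy so the slice no longer shares memory with the original. A small sketch (not part of the original lab):

```python
# Sketch: use .copy() when you want a slice that does NOT alias the original array.
import numpy as np

aarr = np.array([10, 20, 30, 40, 50, 60])
barr = aarr[1:4].copy()     # independent copy instead of a view
barr[1] = barr[1] + 5
print(barr)                 # [20 35 40]
print(aarr)                 # [10 20 30 40 50 60]  <- unchanged
```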
[
[
"we see that the original array is altered since we modified the slice. This may seem wacky to you, or maybe it doesn't. In any case, it is something you will get used to. For now, just be aware of this. \n\n## functions on arrays\n\nNumpy has a wide range of array functons. Here is just a sample.\n\n### Unary functions\n\n#### absolute value",
"_____no_output_____"
]
],
[
[
"arr = np.array([-2, 12, -25, 0])\narr2 = np.abs(arr)\narr2",
"_____no_output_____"
],
[
"arr = np.array([[-2, 12], [-25, 0]])\narr2 = np.abs(arr)\narr2 ",
"_____no_output_____"
]
],
[
[
"#### square",
"_____no_output_____"
]
],
[
[
"arr = np.array([-1, 2, -3, 4])\narr2 = np.square(arr)\narr2",
"_____no_output_____"
]
],
[
[
"#### squareroot",
"_____no_output_____"
]
],
[
[
"arr = np.array([[4, 9], [16, 25]])\narr2 = np.sqrt(arr)\narr2",
"_____no_output_____"
]
],
[
[
"## Binary functions\n\n#### add /subtract / multiply / divide\n",
"_____no_output_____"
]
],
[
[
"arr1 = np.array([[10, 20], [30, 40]])\narr2 = np.array([[1, 2], [3, 4]])\nnp.add(arr1, arr2)",
"_____no_output_____"
],
[
"np.subtract(arr1, arr2)",
"_____no_output_____"
],
[
"np.multiply(arr1, arr2)",
"_____no_output_____"
],
[
"np.divide(arr1, arr2)",
"_____no_output_____"
]
],
[
[
"#### maximum / minimum\n",
"_____no_output_____"
]
],
[
[
"arr1 = np.array([[10, 2], [3, 40]])\narr2 = np.array([[1, 20], [30, 4]])\nnp.maximum(arr1, arr2)",
"_____no_output_____"
]
],
[
[
"#### these are just examples. There are more unary and binary functions",
"_____no_output_____"
],
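To give a flavour of what else is available, here is a short hedged sample of a few more element-wise functions (the input values are arbitrary):

```python
# A few more numpy ufuncs, applied element-wise to arbitrary example arrays.
import numpy as np

arr1 = np.array([1, 2, 3, 4])
arr2 = np.array([4, 3, 2, 1])

print(np.exp(arr1))            # e raised to each element
print(np.power(arr1, arr2))    # arr1 ** arr2 element-wise -> [1 8 9 4]
print(np.mod(arr1, arr2))      # element-wise remainder    -> [1 2 1 0]
print(np.greater(arr1, arr2))  # element-wise comparison   -> [False False  True  True]
```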
[
"## Numpy Uber\nlets say I have Uber drivers at various intersections around Austin. I will represent that as a set of x,y coordinates.\n\n | Driver |xPos | yPos |\n | :---: | :---: | :---: |\n | Ann | 4 | 5 |\n | Clara | 6 | 6 |\n | Dora | 3 | 1 |\n | Erica | 9 | 5 |\n \n \n Now I would like to find the closest driver to a customer who is at 6, 3.\n And to further define *closest* I am going to use what is called **Manhattan Distance**. Roughly put, Manhattan distance is distance if you followed streets. Ann, for example, is two blocks West of our customer and two blocks north. So the Manhattan distance from Ann to our customer is `2+2` or `4`. \n \n First, to make things easy (and because the data in a numpy array must be of the same type), I will represent the x and y positions in one numpy array and the driver names in another:",
"_____no_output_____"
]
],
[
[
"locations = np.array([[4, 5], [6, 6], [3, 1], [9,5]])\nlocations",
"_____no_output_____"
],
[
"drivers = np.array([\"Ann\", \"Clara\", \"Dora\", \"Erica\"])",
"_____no_output_____"
]
],
[
[
"Our customer is at",
"_____no_output_____"
]
],
[
[
"cust = np.array([6, 3])",
"_____no_output_____"
]
],
[
[
"now we are going to figure out the distance between each of our drivers and the customer",
"_____no_output_____"
]
],
[
[
"xydiff = locations - cust\nxydiff",
"_____no_output_____"
]
],
[
[
"NOTE: displaying the results with `xydiff` isn't a necessary step. I just like seeing intermediate results.\n\nOk. now I am goint to sum the absolute values:",
"_____no_output_____"
]
],
[
[
"distances =np.abs(xydiff).sum(axis = 1)\ndistances",
"_____no_output_____"
]
],
[
[
"So the output is the array `[4, 3, 5, 5]` which shows that Ann is 4 away from our customer; Clara is 3 away and so on.\n\nNow I am going to sort these using `argsort`:",
"_____no_output_____"
]
],
[
[
"sorted = np.argsort(distances)\nsorted",
"_____no_output_____"
]
],
[
[
"`argsort` returns an array of sorted indices. So the element at position 1 is the smallest followed by the element at position 0 and so on.\n\nNext, I am going to get the first element of that array (in this case 1) and find the name of the driver at that position in the `drivers` array",
"_____no_output_____"
]
],
[
[
"drivers[sorted[0]]",
"_____no_output_____"
]
],
[
[
"<h3 style=\"color:red\">Q2. You Try</h3>\n<span style=\"color:red\">Can you put all the above in a function. that takes 3 arguments, the location array, the array containing the names of the drivers, and the array containing the location of the customer. It should return the name of the closest driver.</span>\n",
"_____no_output_____"
]
],
[
[
"def findDriver(distanceArr, driversArr, customerArr):\n result = ''\n ### put your code here\n return result\nprint(findDriver(locations, drivers, cust)) # this should return Clara",
"\n"
]
],
[
[
"### CONGRATULATIONS\n\nEven though this is just an intro to Numpy, I am going to throw some math at you. So far we have been looking at a two dimensional example, x and y (or North-South and East-West) and our distance formula for the distance, Dist between Ann, A and Customer C is\n\n$$ DIST_{AC} = |A_x - C_x | + |A_y - C_y | $$\n\nNow I am going to warp this a bit. In this example, each driver is represented by an array (as is the customer) So, Ann is represented by `[1,2]` and the customer by `[3,4]`. So Ann's 0th element is 1 and the customer's 0th element is 3. And, sorry, computer science people start counting at 0 but math people (and all other normal people) start at 1 so we can rewrite the above formula as:\n\n$$ DIST_{AC} = |A_1 - C_1 | + |A_2 - C_2 | $$\n\nThat's the distance formula for Ann and the Customer. We can make the formula by saying the distance between and two people, let's call them *x* and *y* is\n\n\n$$ DIST_{xy} = |x_1 - y_1 | + |x_2 - y_2 | $$\n\nThat is the formula for 2 dimensional Manhattan Distance. We can imagine a three dimensional case. \n\n$$ DIST_{xy} = |x_1 - y_1 | + |x_2 - y_2 | + |x_3 - y_3 | $$\n\nand we can generalize the formula to the n-dimensional case.\n \n$$ DIST_{xy}=\\sum_{i=1}^n |x_i - y_i| $$\n\nJust in time for a five dimensional example:\n\n\n# The Amazing 5D Music example\n\nGuests went into a listening booth and rated the following tunes:\n\n* [Janelle Monae Tightrope](https://www.youtube.com/watch?v=pwnefUaKCbc)\n* [Major Lazer - Cold Water](https://www.youtube.com/watch?v=nBtDsQ4fhXY)\n* [Tim McGraw - Humble & Kind](https://www.youtube.com/watch?v=awzNHuGqoMc)\n* [Maren Morris - My Church](https://www.youtube.com/watch?v=ouWQ25O-Mcg)\n* [Hailee Steinfeld - Starving](https://www.youtube.com/watch?v=xwjwCFZpdns)\n\n\nHere are the results:\n\n| Guest | Janelle Monae | Major Lazer | Tim McGraw | Maren Morris | Hailee Steinfeld| \n|---|---|---|---|---|---|\n| Ann | 4 | 5 | 2 | 1 | 3 |\n| Ben | 3 | 1 | 5 | 4 | 2|\n| Jordyn | 5 | 5 | 2 | 2 | 3|\n| Sam | 4 | 1 | 4 | 4 | 1|\n| Hyunseo | 1 | 1 | 5 | 4 | 1 |\n| Ahmed | 4 | 5 | 3 | 3 | 1 |\n\nSo Ann, for example, really liked Major Lazer and Janelle Monae but didn't care much for Maren Morris.\n\nLet's set up a few numpy arrays.\n",
"_____no_output_____"
]
],
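To make the n-dimensional formula concrete before the exercise, here is a small hedged sketch that computes the 5-dimensional Manhattan distance between two of the rating rows from the table above (an illustration only, not the solution to Q3):

```python
# Sketch: 5-dimensional Manhattan distance between Ann's and Ben's rating vectors.
import numpy as np

ann = np.array([4, 5, 2, 1, 3])
ben = np.array([3, 1, 5, 4, 2])

manhattan = np.abs(ann - ben).sum()   # sum over i of |x_i - y_i|
print(manhattan)                      # 12
```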
[
[
"customers = np.array([[4, 5, 2, 1, 3],\n [3, 1, 5, 4, 2],\n [5, 5, 2, 2, 3],\n [4, 1, 4, 4, 1], \n [1, 1, 5, 4, 1],\n [4, 5, 3, 3, 1]])\n\ncustomerNames = np.array([\"Ann\", \"Ben\", 'Jordyn', \"Sam\", \"Hyunseo\", \"Ahmed\"])\n\n",
"_____no_output_____"
]
],
[
[
"Now let's set up a few new customers:",
"_____no_output_____"
]
],
[
[
"mikaela = np.array([3, 2, 4, 5, 4])\nbrandon = np.array([4, 5, 1, 2, 3])",
"_____no_output_____"
]
],
[
[
"Now we would like to determine which of our current customers is closest to Mikaela and which to Brandon.\n<h3 style=\"color:red\">Q3. You Try</h3>\n<span style=\"color:red\">Can you write a function findClosest that takes 3 arguments: customers, customerNames, and an array representing one customer's ratings and returns the name of the closest customer?</span>\n\nLet's break this down a bit.\n\n1. Which line in the Numpy Uber section above will create a new array which is the result of subtracting the Mikaela array from each row of the customers array resulting in\n\n```\narray([[ 1, 3, -2, -4, -1],\n [ 0, -1, 1, -1, -2],\n [ 2, 3, -2, -3, -1],\n [ 1, -1, 0, -1, -3],\n [-2, -1, 1, -1, -3],\n [ 1, 3, -1, -2, -3]])\n ```\n",
"_____no_output_____"
]
],
[
[
"# TODO\n",
"_____no_output_____"
]
],
[
[
"2. Which line above will take the array you created and generate a single integer distance for each row representing how far away that row is from Mikaela? The results will look like:\n\n```\n array([11, 5, 11, 6, 8, 10])\n```",
"_____no_output_____"
]
],
[
[
"# TO DO \n",
"_____no_output_____"
]
],
[
[
"Finally, we want a sorted array of indices, the zeroth element of that array will be the closest row to Mikaela, the next element will be the next closest and so on. The result should be\n\n```\narray([1, 3, 4, 5, 0, 2])\n```\n",
"_____no_output_____"
]
],
[
[
"# TO DO\n",
"_____no_output_____"
]
],
[
[
"Finally we need the name of the person that is the closest. ",
"_____no_output_____"
]
],
[
[
"# TO DO",
"_____no_output_____"
]
],
[
[
"Okay, time to put it all together. Can you combine all the code you wrote above to finish the following function? So x is the new person and we want to find the closest customer to x.",
"_____no_output_____"
]
],
[
[
"def findClosest(customers, customerNames, x):\n # TO DO\n return ''\n\n\nprint(findClosest(customers, customerNames, mikaela)) # Should print Ben\nprint(findClosest(customers, customerNames, brandon)) # Should print Ann",
"Ben\nAnn\n"
]
],
[
[
"## Numpy Amazon\n\nWe are going to start with the same array we did way up above:\n\n \n | Drone |xPos | yPos |\n | :---: | :---: | :---: |\n | wing_1a | 4 | 5 |\n | wing_2a | 6 | 6 |\n | wing_3a | 3 | 1 |\n | wing_4a | 9 | 5 |\n \n But this time, instead of Uber drivers, think of these as positions of [Alphabet's Wing delivery drones](https://wing.com/). \n Now we would like to find the closest drone to a customer who is at 7, 1.\n \nWith the previous example we used Manhattan Distance. With drones, we can compute the distance as the crow flies -- or Euclidean Distance. We probably learned how to do this way back in 7th grade when we learned the Pythagorean Theorem which states:\n\n$$c^2 = a^2 + b^2$$\n\nWhere *c* is the hypotenuse and *a* and *b* are the two other sides. So, if we want to find *c*:\n\n$$c = \\sqrt{a^2 + b^2}$$\n\n\nIf we want to find the distance between the drone and a customer, *x* and *y* in the formula becomes\n\n$$Dist_{xy} = \\sqrt{(x_1-y_1)^2 + (x_2-y_2)^2}$$\n\nand for `wing_1a` who is at `[4,5]` and our customer who is at `[7,1]` then the formula becomes:\n\n$$Dist_{xy} = \\sqrt{(x_1-y_1)^2 + (x_2-y_2)^2} = \\sqrt{(4-7)^2 + (5-1)^2} =\\sqrt{-3^2 + 4^2} = \\sqrt{9 + 16} = \\sqrt{25} = 5$$\n\nSweet! And to generalize this distance formula:\n\n$$Dist_{xy} = \\sqrt{(x_1-y_1)^2 + (x_2-y_2)^2}$$\n\nto n-dimensions:\n\n$$Dist_{xy} = \\sum_{i=1}^n{\\sqrt{(x_i-y_i)^2}}$$\n\n\n\n\n<h4 style=\"color:red\">Q4. You Try</h3>\n<span style=\"color:red\">Can you write a function euclidean that takes 3 arguments: droneLocation, droneNames, and an array representing one customer's position and returns the name of the closest drone?</span>\n\nFirst, a helpful hint:\n",
"_____no_output_____"
]
],
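Before tackling Q4, here is a hedged sketch of the n-dimensional Euclidean distance by itself, computed for `wing_1a` and the customer at (7, 1) used in the worked example above:

```python
# Sketch: Euclidean distance between one drone and the customer from the worked example.
import numpy as np

wing_1a = np.array([4, 5])
customer = np.array([7, 1])

dist = np.sqrt(np.square(wing_1a - customer).sum())   # sqrt of the summed squared differences
print(dist)                                            # 5.0
```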
[
[
"arr = np.array([-1, 2, -3, 4])\narr2 = np.square(arr)\narr2",
"_____no_output_____"
],
[
"locations = np.array([[4, 5], [6, 6], [3, 1], [9,5]])\ndrivers = np.array([\"wing_1a\", \"wing_2a\", \"wing_3a\", \"wing_4a\"])\ncust = np.array([6, 3])\n\ndef euclidean(droneLocation, droneNames, x):\n result = ''\n ### your code here \n return result\neuclidean(locations, drivers, cust) ",
"_____no_output_____"
]
],
[
[
"<h4 style=\"color:red\">Q5. You Try</h3>\n<span style=\"color:red\">try your code on the \"Amazing 5D Music example. Does it return the same person or a different one?\"</span>",
"_____no_output_____"
]
],
[
[
"#TBD",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a63f26557f7abbd8f0cb2f9d0cfa00f8586161c
| 15,118 |
ipynb
|
Jupyter Notebook
|
3.3_numpy_functions.ipynb
|
butayama/Learning-Python-for-Data-Science
|
43f5e8e0b6f767f46687a4eb33d7530925dd9727
|
[
"MIT"
] | null | null | null |
3.3_numpy_functions.ipynb
|
butayama/Learning-Python-for-Data-Science
|
43f5e8e0b6f767f46687a4eb33d7530925dd9727
|
[
"MIT"
] | null | null | null |
3.3_numpy_functions.ipynb
|
butayama/Learning-Python-for-Data-Science
|
43f5e8e0b6f767f46687a4eb33d7530925dd9727
|
[
"MIT"
] | null | null | null | 19.112516 | 382 | 0.425718 |
[
[
[
"import numpy as np",
"_____no_output_____"
]
],
[
[
"# Create an array",
"_____no_output_____"
]
],
[
[
"np.arange(5)",
"_____no_output_____"
],
[
"np.arange(4,7)",
"_____no_output_____"
],
[
"np.arange(3, 22, 2)",
"_____no_output_____"
],
[
"np.linspace(0, 10, 100)",
"_____no_output_____"
],
[
"np.logspace(0,5,10)",
"_____no_output_____"
]
],
[
[
"# Reshaping an array",
"_____no_output_____"
]
],
[
[
"x = np.arange(12)\nx",
"_____no_output_____"
],
[
"x.reshape(4,3)",
"_____no_output_____"
],
[
"x.reshape(6,-1)",
"_____no_output_____"
],
[
"x.reshape(5, -1)",
"_____no_output_____"
]
],
[
[
"# Get the size and shape",
"_____no_output_____"
]
],
[
[
"x",
"_____no_output_____"
],
[
"x.size",
"_____no_output_____"
],
[
"x.shape",
"_____no_output_____"
],
[
"x.reshape(4,3).shape",
"_____no_output_____"
]
],
[
[
"# Check truth value of an array",
"_____no_output_____"
]
],
[
[
"x",
"_____no_output_____"
],
[
"x.any()",
"_____no_output_____"
],
[
"x.all()",
"_____no_output_____"
],
[
"np.array([]).any()",
"_____no_output_____"
]
],
[
[
"# Transpose",
"_____no_output_____"
]
],
[
[
"x.reshape(4,3)",
"_____no_output_____"
],
[
"x.reshape(4,3).T",
"_____no_output_____"
]
],
[
[
"# Maths",
"_____no_output_____"
]
],
[
[
"y = x.reshape(4,3)\ny",
"_____no_output_____"
],
[
"y + 5",
"_____no_output_____"
],
[
"z = np.arange(4).reshape(4,1)\nz",
"_____no_output_____"
],
[
"y",
"_____no_output_____"
],
[
"y + z",
"_____no_output_____"
]
],
[
[
"# Statistical Sumamry",
"_____no_output_____"
]
],
[
[
"x",
"_____no_output_____"
],
[
"x.sum()",
"_____no_output_____"
],
[
"x.mean()",
"_____no_output_____"
],
[
"x.max()",
"_____no_output_____"
],
[
"x.min()",
"_____no_output_____"
],
[
"x.std()",
"_____no_output_____"
],
[
"x.prod()",
"_____no_output_____"
],
[
"x[1:].prod()",
"_____no_output_____"
]
]
] |
[
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a64009f6393b97199e94ca1e8470ee8cb04a4ab
| 458,955 |
ipynb
|
Jupyter Notebook
|
benchmarking_visualization.ipynb
|
tobias17/CodeNamesOld
|
10fdccc35b5a23eb68ed5d7e7257e9fe83977278
|
[
"MIT"
] | null | null | null |
benchmarking_visualization.ipynb
|
tobias17/CodeNamesOld
|
10fdccc35b5a23eb68ed5d7e7257e9fe83977278
|
[
"MIT"
] | null | null | null |
benchmarking_visualization.ipynb
|
tobias17/CodeNamesOld
|
10fdccc35b5a23eb68ed5d7e7257e9fe83977278
|
[
"MIT"
] | null | null | null | 1,662.880435 | 82,288 | 0.958473 |
[
[
[
"from benchmark import Tracker\n\ntracker = Tracker()",
"_____no_output_____"
],
[
"tracker.load_from_file('benchmarks/b-stretch-3000.txt')",
"_____no_output_____"
],
[
"import matplotlib.pyplot as plt\nimport numpy as np\nfrom scipy.interpolate import make_interp_spline, BSpline",
"_____no_output_____"
],
[
"def get_data(index):\n index -= 1\n info = tracker.clue_infos[index]\n neutral_div = sum(info.neutral_dist_avg) / float(tracker.word_counts[index])\n negative_div = sum(info.negative_dist_avg) / float(tracker.word_counts[index])\n return [(dist, 'red') for dist in info.clue_dists] + [(info.neutral_dist_max, 'tan'),\n ([item / neutral_div for item in info.neutral_dist_avg], 'tan'),\n (info.negative_dist_max, 'blue'),\n ([item / negative_div for item in info.negative_dist_avg], 'blue'),\n (info.assassin_dist, 'black')]",
"_____no_output_____"
],
[
"plt.rcParams['figure.figsize'] = [20, 8]",
"_____no_output_____"
]
],
[
[
"# Clue Size Distribution:",
"_____no_output_____"
]
],
[
[
"plt.bar([str(index+1) for index in range(9)], tracker.word_counts)\nprint('n={}'.format(tracker.size()))\nplt.rcParams['figure.figsize'] = [20, 8]\nplt.show()",
"n=3000\n"
]
],
[
[
"# Clue to Word Similarities",
"_____no_output_____"
]
],
[
[
"cluster_size = 4\nclue_size_threshold = 15\nfor clue_count, n in [(i+1, size,) for i, size in enumerate(tracker.word_counts) if size >= clue_size_threshold]:\n old_data = get_data(clue_count)\n data = []\n for i, (item, color) in enumerate(old_data):\n item = [sum(item[i*cluster_size:(i+1)*cluster_size]) for i in range(int(100/cluster_size))]\n data.append((item, color,))\n largest_x = 0\n max_value = 0\n for item, color in data:\n if max(item) > max_value:\n max_value = max(item)\n for i, v in enumerate(item):\n if i > largest_x and v != 0:\n largest_x = i\n for item, color in data:\n plt.plot([float(i)/len(item) for i in range(len(item)+1)], item+[0], color=color)\n print('{} Clue{}, n={}'.format(clue_count, 's' if clue_count > 1 else '', n))\n plt.rcParams['figure.figsize'] = [20, 8]\n plt.show()",
"1 Clue, n=243\n"
]
]
] |
[
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a640d1bde308dd3c3b377d0ef6373540ae190a8
| 544,860 |
ipynb
|
Jupyter Notebook
|
intake-pangeo-catalog.ipynb
|
earthcube2020/ec20_banihirwe_etal
|
11b71119ddfdca97b195099ad9cded26d702c330
|
[
"CC-BY-4.0"
] | 1 |
2020-05-09T00:29:33.000Z
|
2020-05-09T00:29:33.000Z
|
intake-pangeo-catalog.ipynb
|
earthcube2020/ec20_banihirwe_etal
|
11b71119ddfdca97b195099ad9cded26d702c330
|
[
"CC-BY-4.0"
] | 2 |
2020-05-10T15:52:33.000Z
|
2021-01-11T18:39:14.000Z
|
intake-pangeo-catalog.ipynb
|
earthcube2020/ec20_banihirwe_etal
|
11b71119ddfdca97b195099ad9cded26d702c330
|
[
"CC-BY-4.0"
] | 1 |
2020-05-15T03:35:10.000Z
|
2020-05-15T03:35:10.000Z
| 346.603053 | 465,368 | 0.906273 |
[
[
[
"# Intake / Pangeo Catalog: Making It Easier To Consume Earth’s Climate and Weather Data\n\nAnderson Banihirwe ([email protected]), Charles Blackmon-Luca ([email protected]), Ryan Abernathey ([email protected]), Joseph Hamman ([email protected])\n\n- NCAR, Boulder, CO, USA\n- Columbia University, Palisades, NY, USA\n\n[2020 EarthCube Annual Meeting](https://www.earthcube.org/EC2020) ID: 133\n\n\n## Introduction\n\nComputer simulations of the Earth’s climate and weather generate huge amounts of data. These data are often persisted on high-performance computing (HPC) systems or in the cloud across multiple data assets in a variety of formats (netCDF, Zarr, etc.). \nFinding, investigating, and loading these data assets into compute-ready data containers costs time and effort. \nThe user should know what data are available and their associated metadata, preferably before loading a specific data asset and analyzing it. \n\nIn this notebook, we demonstrate [intake-esm](https://github.com/NCAR/intake-esm), a Python package and an [intake](https://github.com/intake/intake) plugin with aims of facilitating:\n\n- the discovery of earth's climate and weather datasets.\n- the ingestion of these datasets into [xarray](https://github.com/pydata/xarray) dataset containers.\n\nThe common/popular starting point for finding and investigating large datasets is with a data catalog. \nA *data catalog* is a collection of metadata, combined with search tools, that helps data analysts and other users to find the data they need. \nFor a user to take full advantage of intake-esm, they must point it to an *Earth System Model (ESM) data catalog*. \nThis is a JSON-formatted file that conforms to the ESM collection specification.\n\n## ESM Collection Specification\n\nThe [ESM collection specification](https://github.com/NCAR/esm-collection-spec) provides a machine-readable format for describing a wide range of climate and weather datasets, with a goal of making it easier to index and discover climate and weather data assets. \nAn asset is any netCDF/HDF file or Zarr store that contains relevant data.\n\nAn ESM data catalog serves as an inventory of available data, and provides information to explore the existing data assets. \nAdditionally, an ESM catalog can contain information on how to aggregate compatible groups of data assets into singular xarray datasets. \n\n## Use Case: CMIP6 hosted on Google Cloud\n\nThe Coupled Model Intercomparison Project (CMIP) is an international collaborative effort to improve the knowledge about climate change and its impacts on the Earth System and on our society. \n[CMIP began in 1995](https://www.wcrp-climate.org/wgcm-cmip), and today we are in its sixth phase (CMIP6). \nThe CMIP6 data archive consists of data models created across approximately 30 working groups and 1,000 researchers investigating the urgent environmental problem of climate change, and will provide a wealth of information for the next Assessment Report (AR6) of the [Intergovernmental Panel on Climate Change](https://www.ipcc.ch/) (IPCC).\n\nLast year, Pangeo partnered with Google Cloud to bring CMIP6 climate data to Google Cloud’s Public Datasets program. \nYou can read more about this process [here](https://cloud.google.com/blog/products/data-analytics/new-climate-model-data-now-google-public-datasets).\nFor the remainder of this section, we will demonstrate intake-esm's features using the ESM data catalog for the CMIP6 data stored on Google Cloud Storage. 
\nThis catalog resides [in a dedicated CMIP6 bucket](https://storage.googleapis.com/cmip6/pangeo-cmip6.json).\n\n### Loading an ESM data catalog\n\nTo load an ESM data catalog with intake-esm, the user must provide a valid ESM data catalog as input:",
"_____no_output_____"
]
],
[
[
"import warnings\nwarnings.filterwarnings(\"ignore\")\n\nimport intake\n\ncol = intake.open_esm_datastore('https://storage.googleapis.com/cmip6/pangeo-cmip6.json')\ncol",
"_____no_output_____"
]
],
[
[
"The summary above tells us that this catalog contains over 268,000 data assets.\nWe can get more information on the individual data assets contained in the catalog by calling the underlying dataframe created when it is initialized:",
"_____no_output_____"
]
],
[
[
"col.df.head()",
"_____no_output_____"
]
],
[
[
"The first data asset listed in the catalog contains:\n\n- the ambient aerosol optical thickness at 550nm (`variable_id='od550aer'`), as a function of latitude, longitude, time,\n- in an individual climate model experiment with the Taiwan Earth System Model 1.0 model (`source_id='TaiESM1'`),\n- forced by the *Historical transient with SSTs prescribed from historical* experiment (`experiment_id='histSST'`),\n- developed by the Taiwan Research Center for Environmental Changes (`instution_id='AS-RCEC'`),\n- run as part of the Aerosols and Chemistry Model Intercomparison Project (`activity_id='AerChemMIP'`)\n\nAnd is located in Google Cloud Storage at `gs://cmip6/AerChemMIP/AS-RCEC/TaiESM1/histSST/r1i1p1f1/AERmon/od550aer/gn/`.\n\nNote: the amount of details provided in the catalog is determined by the data provider who builds the catalog. \n\n### Searching for datasets\n\nAfter exploring the [CMIP6 controlled vocabulary](https://github.com/WCRP-CMIP/CMIP6_CVs), it’s straightforward to get the data assets you want using intake-esm's `search()` method. In the example below, we are are going to search for the following:\n\n- variables: `tas` which stands for near-surface air temperature\n- experiments: `['historical', 'ssp245', 'ssp585']`: \n - `historical`: all forcing of the recent past.\n - `ssp245`: update of [RCP4.5](https://en.wikipedia.org/wiki/Representative_Concentration_Pathway) based on SSP2.\n - `ssp585`: emission-driven [RCP8.5](https://en.wikipedia.org/wiki/Representative_Concentration_Pathway) based on SSP5.\n- table_id: `Amon` which stands for Monthly atmospheric data.\n- grid_label: `gr` which stands for regridded data reported on the data provider's preferred target grid.\n \nFor more details on the CMIP6 vocabulary, please check this [website](http://clipc-services.ceda.ac.uk/dreq/index.html).",
"_____no_output_____"
]
],
[
[
"# form query dictionary\nquery = dict(experiment_id=['historical', 'ssp245', 'ssp585'],\n table_id='Amon',\n variable_id=['tas'],\n member_id = 'r1i1p1f1',\n grid_label='gr')\n\n# subset catalog and get some metrics grouped by 'source_id'\ncol_subset = col.search(require_all_on=['source_id'], **query)\ncol_subset.df.groupby('source_id')[['experiment_id', 'variable_id', 'table_id']].nunique()",
"_____no_output_____"
]
],
[
[
"### Loading datasets\n\nOnce you've identified data assets of interest, you can load them into xarray dataset containers using the `to_dataset_dict()` method. Invoking this method yields a Python dictionary of high-level aggregated xarray datasets. \nThe logic for merging/concatenating the query results into higher level xarray datasets is provided in the input JSON file, under `aggregation_control`:",
"_____no_output_____"
],
[
"```json\n\n\"aggregation_control\": {\n \"variable_column_name\": \"variable_id\",\n \"groupby_attrs\": [\n \"activity_id\",\n \"institution_id\",\n \"source_id\",\n \"experiment_id\",\n \"table_id\",\n \"grid_label\"\n ],\n \"aggregations\": [{\n \"type\": \"union\",\n \"attribute_name\": \"variable_id\"\n },\n\n {\n \"type\": \"join_new\",\n \"attribute_name\": \"member_id\",\n \"options\": {\n \"coords\": \"minimal\",\n \"compat\": \"override\"\n }\n },\n {\n \"type\": \"join_new\",\n \"attribute_name\": \"dcpp_init_year\",\n \"options\": {\n \"coords\": \"minimal\",\n \"compat\": \"override\"\n }\n }\n ]\n}\n\n```",
"_____no_output_____"
],
[
"Though these aggregation specifications are sufficient to merge individual data assets into xarray datasets, sometimes additional arguments must be provided depending on the format of the data assets.\nFor example, Zarr-based assets can be loaded with the option `consolidated=True`, which relies on a consolidated metadata file to describe the assets with minimal data egress.",
"_____no_output_____"
]
],
[
[
"dsets = col_subset.to_dataset_dict(zarr_kwargs={'consolidated': True}, storage_options={'token': 'anon'})\n\n# list all merged datasets\n[key for key in dsets.keys()]",
"\n--> The keys in the returned dictionary of datasets are constructed as follows:\n\t'activity_id.institution_id.source_id.experiment_id.table_id.grid_label'\n"
]
],
[
[
"When the datasets have finished loading, we can extract any of them like we would a value in a Python dictionary:",
"_____no_output_____"
]
],
[
[
"ds = dsets['ScenarioMIP.THU.CIESM.ssp585.Amon.gr']\nds",
"_____no_output_____"
],
[
"# Let’s create a quick plot for a slice of the data:\nds.tas.isel(time=range(1, 1000, 90))\\\n .plot(col=\"time\", col_wrap=4, robust=True)",
"_____no_output_____"
]
],
[
[
"## Pangeo Catalog\n\nPangeo Catalog is an open-source project to enumerate and organize cloud-optimized climate data stored across a variety of providers. \nIn addition to offering various useful climate datasets in a consolidated location, the project also serves as a means of accessing public ESM data catalogs.\n\n### Accessing catalogs using Python\n\nAt the core of the project is a [GitHub repository](https://github.com/pangeo-data/pangeo-datastore) containing several static intake catalogs in the form of YAML files.\nThanks to plugins like intake-esm and [intake-xarray](https://github.com/intake/intake-xarray), these catalogs can contain links to ESM data catalogs or data assets that can be loaded into xarray datasets, along with the arguments required to load them.\n\nBy editing these files using Git-based version control, anyone is free to contribute a dataset supported by the available [intake plugins](https://intake.readthedocs.io/en/latest/plugin-directory.html).\nUsers can then browse these catalogs by providing their associated URL as input into intake's `open_catalog()`; their tree-like structure allows a user to explore their entirety by simply opening the [root catalog](https://github.com/pangeo-data/pangeo-datastore/blob/master/intake-catalogs/master.yaml) and recursively walking through it:",
"_____no_output_____"
]
],
[
[
"cat = intake.open_catalog('https://raw.githubusercontent.com/pangeo-data/pangeo-datastore/master/intake-catalogs/master.yaml')\nentries = cat.walk(depth=5)\n\n[key for key in entries.keys()]",
"_____no_output_____"
]
],
[
[
"The catalogs can also be explored using intake's own `search()` method:",
"_____no_output_____"
]
],
[
[
"cat_subset = cat.search('cmip6')\n\nlist(cat_subset)",
"_____no_output_____"
]
],
[
[
"Once we have found a dataset or collection we want to explore, we can do so without the need of any user inputted argument:",
"_____no_output_____"
]
],
[
[
"cat.climate.tracmip()",
"_____no_output_____"
]
],
[
[
"### Accessing catalogs using catalog.pangeo.io\n\nFor those who don't want to initialize a Python environmemt to explore the catalogs, [catalog.pangeo.io](https://catalog.pangeo.io/) offers a means of viewing them from a standalone web application.\nThe website directly mirrors the catalogs in the GitHub repository, with previews of each dataset or collection loaded on the fly:\n\n<img src=\"images/pangeo-catalog.png\" alt=\"Example of an intake-esm collection on catalog.pangeo.io\" width=\"1000\">\n\nFrom here, users can view the JSON input associated with an ESM collection and sort/subset its contents:\n\n<img src=\"images/esm-demo.gif\" alt=\"Example of an intake-esm collection on catalog.pangeo.io\" width=\"800\">\n\n## Conclusion\n\nWith intake-esm, much of the toil associated with discovering, loading, and consolidating data assets can be eliminated.\nIn addition to making computations on huge datasets more accessible to the scientific community, the package also promotes reproducibility by providing simple methodology to create consistent datasets.\nCoupled with Pangeo Catalog (which in itself is powered by intake), intake-esm gives climate scientists the means to create and distribute large data collections with instructions on how to use them essentially written into their ESM specifications.\n\nThere is still much work to be done with respect to intake-esm and Pangeo Catalog; in particular, goals include:\n\n- Merging ESM collection specifications into [SpatioTemporal Asset Catalog (STAC) specification](https://stacspec.org/) to offer a more universal specification standard\n- Development of tools to verify and describe catalogued data on a regular basis\n- Restructuring of catalogs to allow subsetting by cloud provider region\n\n[Please reach out](https://discourse.pangeo.io/) if you are interested in participating in any way.\n\n## References\n\n- [intake-esm documentation](https://intake-esm.readthedocs.io/en/latest/)\n- [intake documentation](https://intake.readthedocs.io/en/latest/)\n- [Pangeo Catalog on GitHub](https://github.com/pangeo-data/pangeo-datastore)\n- [Pangeo documentation](http://pangeo.io/)\n- [A list of existing, \"known\" catalogs](https://intake-esm.readthedocs.io/en/latest/faq.html#is-there-a-list-of-existing-catalogs)",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
]
] |
4a6421375a24cc5909cd8190639650fa68365ac6
| 2,372 |
ipynb
|
Jupyter Notebook
|
Chapter 01 - Introduction/Chapter1.ipynb
|
m7adeel/Practical-Discrete-Mathematics
|
c658728639fc9a8ef9a9f9a3640cf96beca13852
|
[
"MIT"
] | 36 |
2021-03-24T07:03:13.000Z
|
2022-03-23T04:20:33.000Z
|
Chapter 01 - Introduction/Chapter1.ipynb
|
bestcourses-ai/Practical-Discrete-Mathematics-with-Python
|
7637b52c48f2eb74a1c96511b1c9c9a5b7375e1c
|
[
"MIT"
] | null | null | null |
Chapter 01 - Introduction/Chapter1.ipynb
|
bestcourses-ai/Practical-Discrete-Mathematics-with-Python
|
7637b52c48f2eb74a1c96511b1c9c9a5b7375e1c
|
[
"MIT"
] | 18 |
2021-03-15T09:41:13.000Z
|
2022-03-23T04:21:35.000Z
| 23.029126 | 211 | 0.495363 |
[
[
[
"# Chapter 1 - Introduction: Key Concepts, Notation, Set Theory, Relations, and Functions\n\nThis notebook contains code accompanying Chapter 1 Introduction: Key Concepts, Notation, Set Theory, Relations, and Functions in *Practical Discrete Mathematics* by Ryan T. White and Archana Tikayat Ray\n\n## Functions and Relations\n\n### The `sort()` function",
"_____no_output_____"
]
],
[
[
"numbers = [3, 1, 4, 12, 8, 5, 2, 9]\nnames = ['Wyatt', 'Brandon', 'Kumar', 'Eugene', 'Elise']\n\n# Apply the sort() function to the lists\nnumbers.sort()\nnames.sort()\n\n# Display the output\nprint(numbers)\nprint(names)",
"[1, 2, 3, 4, 5, 8, 9, 12]\n['Brandon', 'Elise', 'Eugene', 'Kumar', 'Wyatt']\n"
]
],
[
[
"### The `shuffle()` function",
"_____no_output_____"
]
],
[
[
"import random\n\n# Set a random seed so the code is reproducible\nrandom.seed(1)\n\n# Run the random.shuffle() function 5 times and display the outputs\nfor i in range(0,5):\n numbers = [3, 1, 4, 12, 8, 5, 2, 9]\n random.shuffle(numbers)\n print(numbers)",
"[12, 2, 1, 5, 9, 3, 8, 4]\n[4, 2, 8, 3, 1, 12, 5, 9]\n[4, 5, 1, 9, 3, 8, 12, 2]\n[9, 12, 2, 8, 5, 3, 4, 1]\n[9, 8, 4, 3, 12, 1, 5, 2]\n"
]
]
] |
[
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a6429170ec3a0facb8a35f3343cf1b46bbe5656
| 4,348 |
ipynb
|
Jupyter Notebook
|
Chapter 08/sklearn_custom/generic_estimator/Scikit-learn on Boston Housing Dataset - Train with Generic Estimator.ipynb
|
amikewatson/Learn-Amazon-SageMaker-second-edition
|
64955fd96a5917d8d4d5e18a6dfc57a5432250be
|
[
"MIT"
] | 15 |
2021-10-01T02:36:24.000Z
|
2022-03-02T23:37:04.000Z
|
Chapter 08/sklearn_custom/generic_estimator/Scikit-learn on Boston Housing Dataset - Train with Generic Estimator.ipynb
|
amikewatson/Learn-Amazon-SageMaker-second-edition
|
64955fd96a5917d8d4d5e18a6dfc57a5432250be
|
[
"MIT"
] | null | null | null |
Chapter 08/sklearn_custom/generic_estimator/Scikit-learn on Boston Housing Dataset - Train with Generic Estimator.ipynb
|
amikewatson/Learn-Amazon-SageMaker-second-edition
|
64955fd96a5917d8d4d5e18a6dfc57a5432250be
|
[
"MIT"
] | 14 |
2021-10-30T14:21:43.000Z
|
2022-03-11T02:14:28.000Z
| 27.871795 | 166 | 0.558418 |
[
[
[
"%%sh\npygmentize sklearn-boston-housing-generic.py",
"_____no_output_____"
],
[
"%%sh\npygmentize Dockerfile",
"_____no_output_____"
],
[
"%%sh\nexport REGION=eu-west-1\naws ecr create-repository --repository-name sklearn-custom --region $REGION",
"_____no_output_____"
],
[
"%%sh\n# This cell will not run on SageMaker Studio\n# The simplest option is to run these commands on your local machine\nexport REGION=eu-west-1\nexport ACCOUNT_ID=`aws sts get-caller-identity --query Account --output text`\ndocker build -t sklearn-custom:estimator -f Dockerfile .\nexport IMAGE_ID=`docker images -q sklearn-custom:estimator`\ndocker tag $IMAGE_ID $ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/sklearn-custom:estimator\naws ecr get-login-password --region $REGION | docker login --username AWS --password-stdin $ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/sklearn-custom:estimator\ndocker push $ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/sklearn-custom:estimator",
"_____no_output_____"
],
[
"import sagemaker\nfrom sagemaker.estimator import Estimator\n\nprint(sagemaker.__version__)\n\nsess = sagemaker.Session()\naccount_id = sess.boto_session.client('sts').get_caller_identity()['Account']\nregion = sess.boto_session.region_name\n\nbucket = sess.default_bucket() \nprefix = 'sklearn-boston-housing'\n\ntraining = sess.upload_data(path='../housing.csv', key_prefix=prefix + \"/training\")\noutput = 's3://{}/{}/output/'.format(bucket,prefix)\n\nrole = sagemaker.get_execution_role()\n\nsk = Estimator(\n image_uri=account_id+'.dkr.ecr.'+region+'.amazonaws.com/sklearn-custom:estimator',\n role=role,\n instance_count=1, \n instance_type='ml.m5.large',\n output_path=output,\n hyperparameters={\n 'normalize': True,\n 'test-size': 0.1,\n 'random-state': 123\n }\n)\n\nsk.fit({'training':training})",
"_____no_output_____"
],
[
"sk_predictor = sk.deploy(instance_type='ml.t2.medium',\n initial_instance_count=1)",
"_____no_output_____"
],
[
"test_samples = ['0.00632,18.00,2.310,0,0.5380,6.5750,65.20,4.0900,1,296.0,15.30,4.98',\n '0.02731,0.00,7.070,0,0.4690,6.4210,78.90,4.9671,2,242.0,17.80,9.14']\n\nsk_predictor.serializer = sagemaker.serializers.CSVSerializer()\n\nresponse = sk_predictor.predict(test_samples)\nprint(response)",
"_____no_output_____"
],
[
"sk_predictor.delete_endpoint()",
"_____no_output_____"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a64354fdf32110f8d3936b4e361487889c215dc
| 654,921 |
ipynb
|
Jupyter Notebook
|
notebooks/mmcg_parameter_test.ipynb
|
ANL-DIGR/MMCG-Optimization
|
b57ed1797a75b9f4e0ebc05d8f2e26c8e476b1b0
|
[
"BSD-3-Clause"
] | 1 |
2020-06-26T11:15:01.000Z
|
2020-06-26T11:15:01.000Z
|
notebooks/mmcg_parameter_test.ipynb
|
zssherman/mmcg-optimization
|
59a7c6914e4dde6c54dfa83e11ca82b32147d5a7
|
[
"BSD-3-Clause"
] | null | null | null |
notebooks/mmcg_parameter_test.ipynb
|
zssherman/mmcg-optimization
|
59a7c6914e4dde6c54dfa83e11ca82b32147d5a7
|
[
"BSD-3-Clause"
] | 2 |
2018-09-04T16:41:04.000Z
|
2019-02-22T16:48:07.000Z
| 1,393.448936 | 317,896 | 0.957937 |
[
[
[
"import matplotlib.pyplot as plt\nimport numpy as np\nimport pyart\nimport scipy",
"_____no_output_____"
],
[
"radar = pyart.io.read('/home/zsherman/cmac_test_radar.nc')",
"/home/zsherman/dev/pyart/pyart/io/cfradial.py:376: RuntimeWarning: invalid value encountered in less\n data = self.ncvar[:]\n/home/zsherman/dev/pyart/pyart/io/cfradial.py:376: RuntimeWarning: invalid value encountered in greater\n data = self.ncvar[:]\n"
],
[
"radar.fields.keys()",
"_____no_output_____"
],
[
"max_lat = 37\nmin_lat = 36\nmin_lon = -98.3\nmax_lon = -97\nlal = np.arange(min_lat, max_lat, .2)\nlol = np.arange(min_lon, max_lon, .2)\n\ndisplay = pyart.graph.RadarMapDisplay(radar)\nfig = plt.figure(figsize=[10, 8])\ndisplay.plot_ppi_map('reflectivity', sweep=0, resolution='c',\n vmin=-8, vmax=64, mask_outside=False,\n cmap=pyart.graph.cm.NWSRef,\n min_lat=min_lat, min_lon=min_lon,\n max_lat=max_lat, max_lon=max_lon,\n lat_lines=lal, lon_lines=lol)\n# plt.savefig('')",
"_____no_output_____"
],
[
"print(radar.fields['gate_id']['notes'])\ncat_dict = {}\nfor pair_str in radar.fields['gate_id']['notes'].split(','):\n print(pair_str)\n cat_dict.update(\n {pair_str.split(':')[1]:int(pair_str.split(':')[0])})",
"0:multi_trip,1:rain,2:snow,3:no_scatter,4:melting,5:clutter\n0:multi_trip\n1:rain\n2:snow\n3:no_scatter\n4:melting\n5:clutter\n"
],
[
"happy_gates = pyart.correct.GateFilter(radar)\nhappy_gates.exclude_all()\nhappy_gates.include_equal('gate_id', cat_dict['rain'])\nhappy_gates.include_equal('gate_id', cat_dict['melting'])\nhappy_gates.include_equal('gate_id', cat_dict['snow'])",
"_____no_output_____"
],
[
"max_lat = 37\nmin_lat = 36\nmin_lon = -98.3\nmax_lon = -97\nlal = np.arange(min_lat, max_lat, .2)\nlol = np.arange(min_lon, max_lon, .2)\n\ndisplay = pyart.graph.RadarMapDisplay(radar)\nfig = plt.figure(figsize=[10, 8])\ndisplay.plot_ppi_map('reflectivity', sweep=1, resolution='c',\n vmin=-8, vmax=64, mask_outside=False,\n cmap=pyart.graph.cm.NWSRef,\n min_lat=min_lat, min_lon=min_lon,\n max_lat=max_lat, max_lon=max_lon,\n lat_lines=lal, lon_lines=lol,\n gatefilter=happy_gates)\n# plt.savefig('')",
"_____no_output_____"
],
[
"grids1 = pyart.map.grid_from_radars(\n (radar, ), grid_shape=(46, 251, 251),\n grid_limits=((0, 15000.0), (-50000, 50000), (-50000, 50000)),\n fields=list(radar.fields.keys()), gridding_algo=\"map_gates_to_grid\",\n weighting_function='BARNES', gatefilters=(happy_gates, ),\n map_roi=True, toa=17000.0, copy_field_data=True, algorithm='kd_tree',\n leafsize=10., roi_func='dist_beam', constant_roi=500.,\n z_factor=0.05, xy_factor=0.02, min_radius=500.0,\n h_factor=1.0, nb=1.5, bsp=1.0,)",
"_____no_output_____"
],
[
"display = pyart.graph.GridMapDisplay(grids1)\nfig = plt.figure(figsize=[15, 7])\n\n# Panel sizes.\nmap_panel_axes = [0.05, 0.05, .4, .80]\nx_cut_panel_axes = [0.55, 0.10, .4, .25]\ny_cut_panel_axes = [0.55, 0.50, .4, .25]\n\n# Parameters.\nlevel = 3\nvmin = -8\nvmax = 64\nlat = 36.5\nlon = -97.7\n\n# Panel 1, basemap, radar reflectivity and NARR overlay.\nax1 = fig.add_axes(map_panel_axes)\ndisplay.plot_basemap(lon_lines = np.arange(-104, -93, 2))\ndisplay.plot_grid('reflectivity', level=level, vmin=vmin, vmax=vmax,\n cmap=pyart.graph.cm.NWSRef)\ndisplay.plot_crosshairs(lon=lon, lat=lat)\n\n# Panel 2, longitude slice.\nax2 = fig.add_axes(x_cut_panel_axes)\ndisplay.plot_longitude_slice('reflectivity', lon=lon, lat=lat, vmin=vmin, vmax=vmax,\n cmap=pyart.graph.cm.NWSRef)\nax2.set_ylim([0, 15])\nax2.set_xlim([-50, 50])\nax2.set_xlabel('Distance from SGP CF (km)')\n\n# Panel 3, latitude slice.\nax3 = fig.add_axes(y_cut_panel_axes)\nax3.set_ylim([0, 15])\nax3.set_xlim([-50, 50])\ndisplay.plot_latitude_slice('reflectivity', lon=lon, lat=lat, vmin=vmin, vmax=vmax,\n cmap=pyart.graph.cm.NWSRef)\n# plt.savefig('')",
"/home/zsherman/anaconda3/envs/pyart/lib/python3.6/site-packages/mpl_toolkits/basemap/__init__.py:4750: DeprecationWarning: The truth value of an empty array is ambiguous. Returning False, but in future this will result in an error. Use `array.size > 0` to check that an array is not empty.\n if fix_wrap_around and itemindex:\n"
],
[
"grids2 = pyart.map.grid_from_radars(\n (radar, ), grid_shape=(46, 251, 251),\n grid_limits=((0, 15000.0), (-50000, 50000), (-50000, 50000)),\n fields=list(radar.fields.keys()), gridding_algo=\"map_gates_to_grid\",\n weighting_function='BARNES', gatefilters=(happy_gates, ),\n map_roi=True, toa=17000.0, copy_field_data=True, algorithm='kd_tree',\n leafsize=10., roi_func='dist_beam', constant_roi=500.,\n z_factor=0.05, xy_factor=0.02, min_radius=500.0,\n h_factor=1.0, nb=1.5, bsp=1.0,)",
"_____no_output_____"
],
[
"display = pyart.graph.GridMapDisplay(grids)\nfig = plt.figure(figsize=[15, 7])\n\n# Panel sizes.\nmap_panel_axes = [0.05, 0.05, .4, .80]\nx_cut_panel_axes = [0.55, 0.10, .4, .25]\ny_cut_panel_axes = [0.55, 0.50, .4, .25]\n\n# Parameters.\nlevel = 3\nvmin = -8\nvmax = 64\nlat = 36.5\nlon = -97.7\n\n# Panel 1, basemap, radar reflectivity and NARR overlay.\nax1 = fig.add_axes(map_panel_axes)\ndisplay.plot_basemap(lon_lines = np.arange(-104, -93, 2))\ndisplay.plot_grid('reflectivity', level=level, vmin=vmin, vmax=vmax,\n cmap=pyart.graph.cm.NWSRef)\ndisplay.plot_crosshairs(lon=lon, lat=lat)\n\n# Panel 2, longitude slice.\nax2 = fig.add_axes(x_cut_panel_axes)\ndisplay.plot_longitude_slice('reflectivity', lon=lon, lat=lat, vmin=vmin, vmax=vmax,\n cmap=pyart.graph.cm.NWSRef)\nax2.set_ylim([0, 15])\nax2.set_xlim([-50, 50])\nax2.set_xlabel('Distance from SGP CF (km)')\n\n# Panel 3, latitude slice.\nax3 = fig.add_axes(y_cut_panel_axes)\nax3.set_ylim([0, 15])\nax3.set_xlim([-50, 50])\ndisplay.plot_latitude_slice('reflectivity', lon=lon, lat=lat, vmin=vmin, vmax=vmax,\n cmap=pyart.graph.cm.NWSRef)\n# plt.savefig('')",
"_____no_output_____"
],
[
"grids3 = pyart.map.grid_from_radars(\n (radar, ), grid_shape=(46, 251, 251),\n grid_limits=((0, 15000.0), (-50000, 50000), (-50000, 50000)),\n fields=list(radar.fields.keys()), gridding_algo=\"map_gates_to_grid\",\n weighting_function='BARNES', gatefilters=(happy_gates, ),\n map_roi=True, toa=17000.0, copy_field_data=True, algorithm='kd_tree',\n leafsize=10., roi_func='dist_beam', constant_roi=500.,\n z_factor=0.05, xy_factor=0.02, min_radius=500.0,\n h_factor=1.0, nb=1.5, bsp=1.0,)",
"_____no_output_____"
],
[
"display = pyart.graph.GridMapDisplay(grids)\nfig = plt.figure(figsize=[15, 7])\n\n# Panel sizes.\nmap_panel_axes = [0.05, 0.05, .4, .80]\nx_cut_panel_axes = [0.55, 0.10, .4, .25]\ny_cut_panel_axes = [0.55, 0.50, .4, .25]\n\n# Parameters.\nlevel = 3\nvmin = -8\nvmax = 64\nlat = 36.5\nlon = -97.7\n\n# Panel 1, basemap, radar reflectivity and NARR overlay.\nax1 = fig.add_axes(map_panel_axes)\ndisplay.plot_basemap(lon_lines = np.arange(-104, -93, 2))\ndisplay.plot_grid('reflectivity', level=level, vmin=vmin, vmax=vmax,\n cmap=pyart.graph.cm.NWSRef)\ndisplay.plot_crosshairs(lon=lon, lat=lat)\n\n# Panel 2, longitude slice.\nax2 = fig.add_axes(x_cut_panel_axes)\ndisplay.plot_longitude_slice('reflectivity', lon=lon, lat=lat, vmin=vmin, vmax=vmax,\n cmap=pyart.graph.cm.NWSRef)\nax2.set_ylim([0, 15])\nax2.set_xlim([-50, 50])\nax2.set_xlabel('Distance from SGP CF (km)')\n\n# Panel 3, latitude slice.\nax3 = fig.add_axes(y_cut_panel_axes)\nax3.set_ylim([0, 15])\nax3.set_xlim([-50, 50])\ndisplay.plot_latitude_slice('reflectivity', lon=lon, lat=lat, vmin=vmin, vmax=vmax,\n cmap=pyart.graph.cm.NWSRef)\n# plt.savefig('')",
"_____no_output_____"
],
[
"grids4 = pyart.map.grid_from_radars(\n (radar, ), grid_shape=(46, 251, 251),\n grid_limits=((0, 15000.0), (-50000, 50000), (-50000, 50000)),\n fields=list(radar.fields.keys()), gridding_algo=\"map_gates_to_grid\",\n weighting_function='BARNES', gatefilters=(happy_gates, ),\n map_roi=True, toa=17000.0, copy_field_data=True, algorithm='kd_tree',\n leafsize=10., roi_func='dist_beam', constant_roi=500.,\n z_factor=0.05, xy_factor=0.02, min_radius=500.0,\n h_factor=1.0, nb=1.5, bsp=1.0,)",
"_____no_output_____"
],
[
"display = pyart.graph.GridMapDisplay(grids)\nfig = plt.figure(figsize=[15, 7])\n\n# Panel sizes.\nmap_panel_axes = [0.05, 0.05, .4, .80]\nx_cut_panel_axes = [0.55, 0.10, .4, .25]\ny_cut_panel_axes = [0.55, 0.50, .4, .25]\n\n# Parameters.\nlevel = 3\nvmin = -8\nvmax = 64\nlat = 36.5\nlon = -97.7\n\n# Panel 1, basemap, radar reflectivity and NARR overlay.\nax1 = fig.add_axes(map_panel_axes)\ndisplay.plot_basemap(lon_lines = np.arange(-104, -93, 2))\ndisplay.plot_grid('reflectivity', level=level, vmin=vmin, vmax=vmax,\n cmap=pyart.graph.cm.NWSRef)\ndisplay.plot_crosshairs(lon=lon, lat=lat)\n\n# Panel 2, longitude slice.\nax2 = fig.add_axes(x_cut_panel_axes)\ndisplay.plot_longitude_slice('reflectivity', lon=lon, lat=lat, vmin=vmin, vmax=vmax,\n cmap=pyart.graph.cm.NWSRef)\nax2.set_ylim([0, 15])\nax2.set_xlim([-50, 50])\nax2.set_xlabel('Distance from SGP CF (km)')\n\n# Panel 3, latitude slice.\nax3 = fig.add_axes(y_cut_panel_axes)\nax3.set_ylim([0, 15])\nax3.set_xlim([-50, 50])\ndisplay.plot_latitude_slice('reflectivity', lon=lon, lat=lat, vmin=vmin, vmax=vmax,\n cmap=pyart.graph.cm.NWSRef)\n# plt.savefig('')",
"_____no_output_____"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a6443faa756e52b3a319d6dfb78a61a2e140142
| 32,623 |
ipynb
|
Jupyter Notebook
|
codes/.ipynb_checkpoints/2_Vector_Space_Model-checkpoint.ipynb
|
dai39suke/iro_assignment
|
f1c6c621d618febcd038434b227b43b67c28092f
|
[
"MIT"
] | null | null | null |
codes/.ipynb_checkpoints/2_Vector_Space_Model-checkpoint.ipynb
|
dai39suke/iro_assignment
|
f1c6c621d618febcd038434b227b43b67c28092f
|
[
"MIT"
] | null | null | null |
codes/.ipynb_checkpoints/2_Vector_Space_Model-checkpoint.ipynb
|
dai39suke/iro_assignment
|
f1c6c621d618febcd038434b227b43b67c28092f
|
[
"MIT"
] | null | null | null | 36.047514 | 10,960 | 0.660209 |
[
[
[
"# 第2回 ベクトル空間モデル\n\nこの演習ページでは,ベクトル空間モデルに基づく情報検索モデルについて説明します.具体的には,文書から特徴ベクトルへの変換方法,TF-IDFの計算方法,コサイン類似度による文書ランキングについて,その実装例を説明します.第2回演習の最終目的は,ある与えられた文書コーパスに対して,TF-IDFで重み付けされた特徴ベクトルによる文書ランキングが実装できるようになることです.",
"_____no_output_____"
],
[
"## ライブラリ\nこの回の演習では,以下のライブラリを使用します. \n- [numpy, scipy](http://www.numpy.org/)\n + Pythonで科学技術計算を行うための基礎的なライブラリ.\n- [gensim](https://radimrehurek.com/gensim/index.html)\n + トピックモデリング(LDA)やword2vecなどを手軽に利用するためのPythonライブラリ.\n- [nltk (natural language toolkit)](http://www.nltk.org/)\n + 自然言語処理に関するpythonライブラリです.この演習ではストップワードのために用います.ほかにも,単語のステミングやトークナイズなどの機能をはじめ,品詞推定,依存関係分析など自然言語処理のあらゆるメソッドが用意されています.\n- [pandas](http://pandas.pydata.org/)\n + pythonでデータ分析をするためのフレームワークです.この演習ではデータをプロットするために用いています.",
"_____no_output_____"
],
[
"## 第2回目の演習の内容\n``h29iro/data/`` に `sample.corpus` というファイルを置いています. このファイルには改行区切りで3件の短い文書が保存されています.この演習では,このファイルに対してTF-IDFで重み付けされた特徴ベクトルを作成し,コサイン類似度によるランキングを行います.",
"_____no_output_____"
],
[
"## 1. 文書の読み込みとトークナイズ\nまずは,`sample.corpus`を読み込み,各文書のBoW表現を抽出します.",
"_____no_output_____"
]
],
[
[
"import numpy as np\nimport gensim\nfrom nltk.corpus import stopwords\nimport pandas as pd\nnp.set_printoptions(precision=4)",
"_____no_output_____"
],
[
"# 小数点3ケタまで表示\n%precision 3",
"_____no_output_____"
],
[
"with open(\"../data/sample.corpus\", \"r\") as f: #sample.corpusの読み込み\n text = f.read().strip().split(\"\\n\") #sample.corpusのテキストデータを取得し,それを改行で分割\ntext",
"_____no_output_____"
]
],
[
[
"3件の文書があることが分かりますね.次に,文章をトークン(単語)に分割します.今回は簡単のため単純にスペース区切りによって単語に分割します.",
"_____no_output_____"
]
],
[
[
"raw_corpus = [d.lower().split() for d in text] #文章を小文字に変換して単語に分割する\nprint(\"d1=\" , raw_corpus[0])\nprint(\"d2=\" , raw_corpus[1])\nprint(\"d3=\" , raw_corpus[2])",
"d1= ['i', 'live', 'in', 'kyoto', 'and', 'kyoto', 'is', 'a', 'beautiful', 'city']\nd2= ['kyoto', 'was', 'the', 'captial', 'of', 'japan', 'and', 'is', 'in', 'kansai', 'and', 'kansai', 'is', 'in', 'japan']\nd3= ['kyoto', 'is', 'in', 'kansai', 'and', 'kyoto', 'is', 'historical', 'city']\n"
]
],
[
[
"文が単語の集合に変換されました.しかし,この単語集合には \"i\" や \"of\" などのストップワードが含まれています.そこで,ストップワードを除去してみましょう.\n\nストップワードのリストはネットで探せば様々な種類が見つかります.ここでは,nltkのstopwordsモジュールを利用します.",
"_____no_output_____"
]
],
[
[
"# stopwords.words(\"english\")に含まれていない単語のみ抽出\ncorpus = [list(filter(lambda word: word not in stopwords.words(\"english\"), x)) for x in raw_corpus] \nprint(\"d1=\" , corpus[0])\nprint(\"d2=\" , corpus[1])\nprint(\"d3=\" , corpus[2])",
"d1= ['live', 'kyoto', 'kyoto', 'beautiful', 'city']\nd2= ['kyoto', 'captial', 'japan', 'kansai', 'kansai', 'japan']\nd3= ['kyoto', 'kansai', 'kyoto', 'historical', 'city']\n"
]
],
[
[
"## 2. 特徴ベクトルの生成\n次に文書の特徴ベクトルを生成します.ここからの流れは,以下の通りになります.\n\n1. 文書集合(corpus)から 単語->単語ID の辞書 (dictionary) を作成する.\n2. 作成された辞書を基に,文書を (単語ID,出現回数)の集合 (id_corpus) として表現する.\n3. id_corpusからTfidfModelを用いて,TF-IDFで重み付けされた特徴ベクトルを作成する.\n\nまずは,文書集合(コーパス)から単語->単語ID の辞書 (dictionary) を作成します.\n",
"_____no_output_____"
]
],
[
[
"dictionary = gensim.corpora.Dictionary(corpus) #コーパスを与えて,単語->IDの辞書を作成する\ndictionary.token2id #作成された辞書の中身",
"_____no_output_____"
]
],
[
[
"このdictionaryを用いて,文書の単語をID化します.",
"_____no_output_____"
]
],
[
[
"id_corpus = [dictionary.doc2bow(document) for document in corpus]\nid_corpus",
"_____no_output_____"
]
],
[
[
"作成されたid_corpusは,たとえば,1件目の文書は",
"_____no_output_____"
]
],
[
[
"id_corpus[0]",
"_____no_output_____"
]
],
[
[
"という内容になっています.たとえば,(0,2)というデータは\n```\n単語ID0の単語が2回出現\n```\nという内容を表しています. つまり,単語の出現頻度(term frequency)のみで文書を特徴ベクトル化したことになります.なお,これをnumpyのベクトルとして抽出したければ,corpus2denseメソッドを用います.",
"_____no_output_____"
]
],
[
[
"tf_vectors = gensim.matutils.corpus2dense(id_corpus, len(dictionary)).T\nprint(\"d1=\", tf_vectors[0])\nprint(\"d2=\", tf_vectors[1])\nprint(\"d3=\", tf_vectors[2])",
"d1= [ 1. 1. 2. 1. 0. 0. 0. 0.]\nd2= [ 0. 0. 1. 0. 1. 2. 2. 0.]\nd3= [ 0. 0. 2. 1. 0. 1. 0. 1.]\n"
]
],
[
[
"今回用意したコーパスは語彙数が8しかありませんが,実際のケースでは,この特徴ベクトルは非常に疎になることが容易に想像つくと思います.\n\nさて,id_corpusからTFIDFで重み付けされた特徴ベクトルを得るには, models.TfidfModel メソッドを用います.",
"_____no_output_____"
]
],
[
[
"tfidf_model = gensim.models.TfidfModel(id_corpus, normalize=False) #normalize=Trueにすると,文書長によってtfを正規化する\ntfidf_corpus = tfidf_model[id_corpus] #id_corpusをtfidfで重み付けされたものに変換",
"_____no_output_____"
]
],
[
[
"これでTF-IDFで重み付けされた特徴ベクトルが得られました.たとえば,1件目の文書$d_1$に対する特徴ベクトル${\\mathbf d}_1$の中身を見てみます.",
"_____no_output_____"
]
],
[
[
"tfidf_corpus[0]",
"_____no_output_____"
]
],
[
[
"TFIDFの値は,(単語ID,重み) として得られています.単語IDを実際の単語に変換するにはdictionaryを通します.",
"_____no_output_____"
]
],
[
[
"[(dictionary[x[0]], x[1]) for x in tfidf_corpus[0]]#dictionary[token_id]でアクセスすると実際の単語が返ってくる",
"_____no_output_____"
]
],
[
[
"同様に2件目の文書$d_2$についても見てみます.",
"_____no_output_____"
]
],
[
[
"doc2 = [(dictionary[x[0]], x[1]) for x in tfidf_corpus[1]]\ndoc2",
"_____no_output_____"
]
],
[
[
"たとえば, 文書$d_{2}$における`japan`のTFIDF値が本当に正しいのか検証してみましょう.\n\n$tfidf_{d_2, japan} = tf_{d_2, japan} \\log \\frac{N}{df_{japan}}$ ,\n\nいま, $tf_{d_2, japan} = 2$, $N = 3$, $df_{japan}$ = 1 ですので,\n\n$tfidf_{d_2, japan} = 2 \\log 3 = 3.170$\nとなり,gensimで得られた結果と一致していることが分かります.",
"_____no_output_____"
]
],
[
[
"import math\n2*math.log2(3) #2log3の計算方法",
"_____no_output_____"
]
],
[
[
"# 3. コサイン類似度\nそれでは,コサイン類似度による文書ランキングを行ってみましょう.\n\nクエリと文書の類似度を測る前に,まずは文書同士のコサイン類似度を計算してみます. コサイン類似度の計算はgensimでも良いのですが,ここでは,いったんnumpyのベクトルを取得して,そのベクトルに対してコサイン類似度を計算してみます.",
"_____no_output_____"
]
],
[
[
"# 各文書のtfidfベクトルを取得\ntfidf_vectors = gensim.matutils.corpus2dense(tfidf_corpus, len(dictionary)).T\nprint (\"d1=\", tfidf_vectors[0])\nprint (\"d2=\", tfidf_vectors[1])\nprint (\"d3=\", tfidf_vectors[2])",
"d1= [ 1.585 1.585 0. 0.585 0. 0. 0. 0. ]\nd2= [ 0. 0. 0. 0. 1.585 1.17 3.17 0. ]\nd3= [ 0. 0. 0. 0.585 0. 0.585 0. 1.585]\n"
],
[
"# コサイン類似度を計算する関数を用意\nfrom scipy.spatial.distance import cosine\ndef cosine_sim(v1, v2):\n #scipyのcosineは類似度ではなく距離関数のため, 1-コサイン距離 とすることで,コサイン類似度に変換する\n return 1.0 - cosine(v1, v2) ",
"_____no_output_____"
],
[
"# 各文書間のコサイン類似度を計算してみる\nprint (\"sim(d1, d2)=\", cosine_sim(tfidf_vectors[0], tfidf_vectors[1]))\nprint (\"sim(d2, d3)=\", cosine_sim(tfidf_vectors[1], tfidf_vectors[2]))\nprint (\"sim(d1, d3)=\", cosine_sim(tfidf_vectors[0], tfidf_vectors[2]))",
"sim(d1, d2)= 0.0\nsim(d2, d3)= 0.102562095083\nsim(d1, d3)= 0.082618937993\n"
]
],
[
[
"それでは,クエリを特徴ベクトルに変換し,クエリと文書のコサイン類似度を求めていきましょう.",
"_____no_output_____"
]
],
[
[
"q = {\"kansai\", \"japan\"}\ntfidf_q = tfidf_model[dictionary.doc2bow(q)] #クエリをtfidfベクトルに変換\nquery_vector = gensim.matutils.corpus2dense([tfidf_q], len(dictionary)).T[0] #numpyのベクトルに変換\nprint (\"q=\", query_vector)",
"q= [ 0. 0. 0. 0. 0. 0.585 1.585 0. ]\n"
],
[
"print([(dictionary[x[0]], x[1]) for x in tfidf_q])",
"[('kansai', 0.5849625007211562), ('japan', 1.5849625007211563)]\n"
],
[
"print (\"sim(q, d1) = \", cosine_sim(query_vector, tfidf_vectors[0]))\nprint (\"sim(q, d2) = \", cosine_sim(query_vector, tfidf_vectors[1]))\nprint (\"sim(q, d3) = \", cosine_sim(query_vector, tfidf_vectors[2]))",
"sim(q, d1) = 0.0\nsim(q, d2) = 0.905346644389\nsim(q, d3) = 0.113284893168\n"
]
],
[
[
"この結果から,q={\"kansai\", \"japan\"} というクエリに対しては,$d_2,d_3, d_1$の順でランク付けされることが分かります.",
"_____no_output_____"
],
[
"## 4. ベクトル空間の可視化\n\n最後に,得られた特徴ベクトルを可視化してみましょう.特徴ベクトルそのものは多次元(今回の場合は8次元)ですが,これを次元削減の手法を使って,2次元空間に射影してみます.今回は,`LSI`(Latent Semantic Indexing)という手法を用いて,特徴ベクトルを2次元空間に落とし込みます.LSIについては,講義で触れるかもしれません(講義の進み方次第).",
"_____no_output_____"
]
],
[
[
"import matplotlib.pylab as plt",
"_____no_output_____"
],
[
"%matplotlib inline",
"_____no_output_____"
],
[
"# LSIにより特徴ベクトルを2次元に落とし込む\nlsi = gensim.models.LsiModel(tfidf_corpus, id2word=dictionary, num_topics=2)\nlsi_corpus = lsi[tfidf_corpus]\nlsi_vectors = gensim.matutils.corpus2dense(lsi_corpus, 2).T\nprint(\"d1=\", lsi_vectors[0])\nprint(\"d2=\", lsi_vectors[1])\nprint(\"d3=\", lsi_vectors[2])\nquery_lsi_corpus = lsi[[tfidf_q]] \nquery_lsi_vector = gensim.matutils.corpus2dense(query_lsi_corpus, 2).T[0]\nprint (\"q=\", query_lsi_vector)",
"d1= [-0.009 -2.302]\nd2= [-3.73 0.028]\nd3= [-0.237 -0.346]\nq= [-1.53 0.007]\n"
],
[
"# 散布図にプロットするため,DataFrameに変換\naxis_names = [\"z1\", \"z2\"]\ndoc_names = [\"d1\", \"d2\", \"d3\", \"q\"]\ndf = pd.DataFrame(np.r_[lsi_vectors, [query_lsi_vector]], \n columns=axis_names, index=doc_names) # np.r_ は行列同士の連結\ndf",
"_____no_output_____"
],
[
"# 散布図をプロット\nfig, ax = plt.subplots()\ndf.plot.scatter(x=\"z1\", y=\"z2\", ax=ax)\nax.axvline(x=0, lw=2, color='red') #x軸とy軸に線を引く\nax.axhline(y=0, lw=2, color='red') \nax.grid(True)\nfor k, v in df.iterrows():\n ax.annotate(k, xy=(v[0]+0.05,v[1]+0.05),size=15) #データ点にラベル名を付与",
"_____no_output_____"
]
],
[
[
"この図を見てみると,やはりクエリ$q$と文書$d_2$はほぼ同じ方向(つまり,コサイン類似度が1に近い)であることがわかり, $q$と$d_1$の角度はほぼ直角(つまりコサイン類似度が0)であることがわかります.",
"_____no_output_____"
],
[
"----",
"_____no_output_____"
],
[
"# 演習課題その1 ベクトル空間モデル\n\n## 必須課題(1) 与えられたコーパスに対する検索の実現\n\n以下からコーパスを1つ以上選択し,ベクトル空間モデルに基づいた検索を実現せよ.3種類以上のクエリでの検索結果を示すこと.\n\n\n1. 京都観光に関する83件の文書(h29iro/data/kyoto_results_100.json)\n2. 各自で用意したコーパス.ただし,100件以上の文書数を含むこと.もっと多くてもよい.\n3. Wikipedia([参考: gensim Tutorial](https://radimrehurek.com/gensim/wiki.html) )※ただし,モデル構築にとんでもない時間がかかるそうなので覚悟すること.\n\n\n- ページに表示する検索結果は各クエリ5-10件程度で良い.",
"_____no_output_____"
]
],
[
[
"# 1.のコーパスはjson形式で保管されている.\nimport json\nwith open(\"../data/kyoto_results_100.json\", \"r\") as f:\n docs = json.load(f)\nprint(\"Num of docs = \", len(docs))\ndocs[0]",
"Num of docs = 83\n"
],
[
"# `bow` には形態素解析でトークン化された単語列がスペース区切りで保存されている.\n# これを使用して特徴ベクトルを作成するとよい.\ndocs[0][\"bow\"]",
"_____no_output_____"
]
],
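As a starting point for required task (1), the sketch below simply strings together the same gensim steps used on `sample.corpus`, this time over the `bow` field of the Kyoto documents. It is only a minimal outline and not part of the original hand-out: the helper name `rank_documents`, the number of results, and the example query terms are illustrative choices, and documents whose vectors share no terms with the query will produce a NaN similarity that you may want to filter out.

```python
# Minimal sketch for required task (1): vector-space search over the Kyoto corpus.
# Assumes `docs` has already been loaded from kyoto_results_100.json as above.
import gensim
from scipy.spatial.distance import cosine

tokenized = [d["bow"].split() for d in docs]        # `bow` is space-separated tokens
dictionary = gensim.corpora.Dictionary(tokenized)   # word -> id mapping
id_corpus = [dictionary.doc2bow(doc) for doc in tokenized]
tfidf_model = gensim.models.TfidfModel(id_corpus, normalize=False)
tfidf_vectors = gensim.matutils.corpus2dense(tfidf_model[id_corpus],
                                             len(dictionary)).T

def rank_documents(query_terms, top_n=10):
    """Rank documents by cosine similarity to the query (illustrative helper)."""
    q_bow = dictionary.doc2bow(query_terms)
    q_vec = gensim.matutils.corpus2dense([tfidf_model[q_bow]], len(dictionary)).T[0]
    sims = [1.0 - cosine(q_vec, d_vec) for d_vec in tfidf_vectors]
    ranked = sorted(enumerate(sims), key=lambda x: x[1], reverse=True)
    return ranked[:top_n]

# Example query (arbitrary terms; pick words that actually occur in `bow`)
for doc_id, score in rank_documents(["京都", "観光"]):
    print(doc_id, round(score, 3))
```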
[
[
"## 任意課題(a) Okapi BM25\n\n上記(1)に対して, Okapi BM25 に基づくランキングを行い,上記(1)の結果と比較してみよ.\n\n## 任意課題(b) 適合性フィードバック\n\n適合性フィードバックによるクエリ修正を行い,検索結果がどのように変化するのか分析せよ.また,コーパス及びクエリを可視化することで,修正されたクエリが適合・不適合文書の特徴ベクトルにどのように影響されているか幾何的に分析せよ.\n\n\n# 課題の提出方法\n\nいずれかの方法で,ipython notebookのページ(.ipynbファイル)とそのhtml版を提出すること.\n\n1. 添付ファイルで山本に送信.\n - 送付先 tyamamot at dl.kuis.kyoto-u.ac.jp\n2. 各自のgithubやgithub gistにアップロードし,そのURLを山本に送信.この場合はhtml版を用意する必要はない.\n3. 上記以外で,山本が実際に.ipynbファイルを確認できる方法.\n\n\n# 締切\n\n- 2017年11月30日(木)23:59\n- 締切に関する個別の相談は``受け付けます``.",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
]
] |
4a6467850f39da7608747102b8ec50d89122aa54
| 14,501 |
ipynb
|
Jupyter Notebook
|
TD3PG/TD3PG.ipynb
|
abr-98/Reinforcement-Learning
|
40f39233b7156d4821d1ba86664ea82b418eb077
|
[
"MIT"
] | null | null | null |
TD3PG/TD3PG.ipynb
|
abr-98/Reinforcement-Learning
|
40f39233b7156d4821d1ba86664ea82b418eb077
|
[
"MIT"
] | null | null | null |
TD3PG/TD3PG.ipynb
|
abr-98/Reinforcement-Learning
|
40f39233b7156d4821d1ba86664ea82b418eb077
|
[
"MIT"
] | null | null | null | 44.075988 | 208 | 0.557548 |
[
[
[
"!pip install roboschool==1.0.48 gym==0.15.4",
"Collecting roboschool==1.0.48\n\u001b[?25l Downloading https://files.pythonhosted.org/packages/da/31/ce69340a0698e85de2db787023aee5c9416d4ab2ded8cbccf97168ceec81/roboschool-1.0.48-cp37-cp37m-manylinux1_x86_64.whl (44.9MB)\n\u001b[K |████████████████████████████████| 44.9MB 148kB/s \n\u001b[?25hCollecting gym==0.15.4\n\u001b[?25l Downloading https://files.pythonhosted.org/packages/1d/85/a7a462d7796f097027d60f9a62b4e17a0a94dcf12ac2a9f9a913333b11a6/gym-0.15.4.tar.gz (1.6MB)\n\u001b[K |████████████████████████████████| 1.6MB 31.7MB/s \n\u001b[?25hRequirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from gym==0.15.4) (1.4.1)\nRequirement already satisfied: numpy>=1.10.4 in /usr/local/lib/python3.7/dist-packages (from gym==0.15.4) (1.19.5)\nRequirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from gym==0.15.4) (1.15.0)\nCollecting pyglet<=1.3.2,>=1.2.0\n\u001b[?25l Downloading https://files.pythonhosted.org/packages/1c/fc/dad5eaaab68f0c21e2f906a94ddb98175662cc5a654eee404d59554ce0fa/pyglet-1.3.2-py2.py3-none-any.whl (1.0MB)\n\u001b[K |████████████████████████████████| 1.0MB 24.2MB/s \n\u001b[?25hCollecting cloudpickle~=1.2.0\n Downloading https://files.pythonhosted.org/packages/c1/49/334e279caa3231255725c8e860fa93e72083567625573421db8875846c14/cloudpickle-1.2.2-py2.py3-none-any.whl\nRequirement already satisfied: opencv-python in /usr/local/lib/python3.7/dist-packages (from gym==0.15.4) (4.1.2.30)\nRequirement already satisfied: future in /usr/local/lib/python3.7/dist-packages (from pyglet<=1.3.2,>=1.2.0->gym==0.15.4) (0.16.0)\nBuilding wheels for collected packages: gym\n Building wheel for gym (setup.py) ... \u001b[?25l\u001b[?25hdone\n Created wheel for gym: filename=gym-0.15.4-cp37-none-any.whl size=1648486 sha256=33a9fded28517a6790a17cbbbb03304dd37da2a891ba3ef484f655e399559518\n Stored in directory: /root/.cache/pip/wheels/e9/26/9b/8a1a6599a91077a938ac4348cc3d3ac84bfab0dbfddeb4c6e7\nSuccessfully built gym\n\u001b[31mERROR: tensorflow-probability 0.13.0 has requirement cloudpickle>=1.3, but you'll have cloudpickle 1.2.2 which is incompatible.\u001b[0m\nInstalling collected packages: pyglet, cloudpickle, gym, roboschool\n Found existing installation: pyglet 1.5.0\n Uninstalling pyglet-1.5.0:\n Successfully uninstalled pyglet-1.5.0\n Found existing installation: cloudpickle 1.3.0\n Uninstalling cloudpickle-1.3.0:\n Successfully uninstalled cloudpickle-1.3.0\n Found existing installation: gym 0.17.3\n Uninstalling gym-0.17.3:\n Successfully uninstalled gym-0.17.3\nSuccessfully installed cloudpickle-1.2.2 gym-0.15.4 pyglet-1.3.2 roboschool-1.0.48\n"
],
[
"import tensorflow as tf\nimport numpy as np\nimport gym\nimport roboschool",
"_____no_output_____"
],
[
"class TD3PG:\n\n def __init__(self,env,memory):\n self.env=env\n self.state_dimension=env.observation_space.shape\n self.action_dimension=env.action_space.shape[0]\n self.min_action=env.action_space.low[0]\n self.max_action=env.action_space.high[0]\n self.Train_actor=None\n self.Target_actor=None\n self.Train_critic_1=None\n self.Target_critic_1=None\n self.Train_critic_2=None\n self.Target_critic_2=None\n self.memory=memory\n self.batch_size=128\n self.collect_initial_=10000\n self.cr_1_opt=tf.keras.optimizers.Adam(0.001)\n self.cr_2_opt=tf.keras.optimizers.Adam(0.001)\n self.ac_opt=tf.keras.optimizers.Adam(0.001) \n self.steps_to_stop_exp=2000\n self.steps_to_train=1000000\n self.update_actor_step=2\n self.tau=0.005\n \n def get_critic(self):\n\n input_state=tf.keras.layers.Input(self.state_dimension)\n input_action=tf.keras.layers.Input(self.action_dimension)\n layer_1=tf.keras.layers.concatenate([input_state,input_action],axis=-1)\n layer_2=tf.keras.layers.Dense(400,activation=\"relu\")(layer_1)\n layer_3=tf.keras.layers.Dense(300,activation=\"relu\")(layer_2)\n out_Q=tf.keras.layers.Dense(1,activation=None)(layer_3)\n\n model=tf.keras.Model(inputs=[input_state,input_action],outputs=[out_Q])\n return model\n\n def get_actor(self):\n\n input=tf.keras.layers.Input(self.state_dimension)\n layer_1=tf.keras.layers.Dense(400,activation=\"relu\")(input)\n layer_2=tf.keras.layers.Dense(300,activation=\"relu\")(layer_1)\n out=tf.keras.layers.Dense(self.action_dimension,activation=\"tanh\")(layer_2)\n\n model=tf.keras.Model(inputs=[input],outputs=[out])\n return model\n\n def get_action(self,actor,s,sigma=0,noise=False):\n mu=actor(s)\n Noise_sigma=sigma\n if noise:\n action=mu+tf.random.normal(shape=[self.action_dimension],mean=0,stddev=Noise_sigma)\n else:\n action=mu\n\n action=self.max_action*(tf.clip_by_value(action,self.min_action,self.max_action)) ## AS tanh is used in activation\n return action\n \n def get_Q_value(self,critic,s,a):\n q=critic([s,a])\n return q\n \n def initialize_buffer(self):\n \n curr_state=self.env.reset()\n for _ in range(self.collect_initial_):\n action=self.env.action_space.sample()\n next_state,reward,done,_=self.env.step(action)\n self.memory.push(curr_state,action,reward,next_state,not done)\n\n if done:\n curr_state=self.env.reset()\n else:\n curr_state=next_state\n\n def update_networks(self,target_net,train_net,tau):\n weights_tar, weights_tra = target_net.get_weights(), train_net.get_weights()\n for i in range(len(weights_tar)):\n weights_tar[i] = tau*weights_tra[i] + (1-tau)*weights_tar[i]\n target_net.set_weights(weights_tar)\n \n def critic_pred(self,critic,states):\n\n c=0.5\n mu=self.Target_actor(states)\n noise_action=mu+tf.clip_by_value(tf.random.normal(shape=[self.action_dimension],mean=0,stddev=0.2),-c,c)\n predicted_actions=self.max_action*tf.clip_by_value(noise_action,self.min_action,self.max_action)\n\n next_state_value=self.get_Q_value(critic,states,predicted_actions)\n return next_state_value\n \n def loss_critics(self,states, actions, rewards, next_states, not_dones, gamma=0.99):\n next_value_1=tf.squeeze(self.critic_pred(self.Target_critic_1,next_states))\n next_value_2=tf.squeeze(self.critic_pred(self.Target_critic_2,next_states))\n\n pred_value_1=tf.squeeze(self.get_Q_value(self.Train_critic_1,np.array(states,dtype=\"float32\"),np.array(actions,dtype=\"float32\")))\n pred_value_2=tf.squeeze(self.get_Q_value(self.Train_critic_2,np.array(states,dtype=\"float32\"),np.array(actions,dtype=\"float32\")))\n\n 
next_value=tf.math.minimum(next_value_1,next_value_2)\n\n target_value= rewards + gamma*next_value*not_dones\n\n critic_loss_1=tf.reduce_mean(tf.math.squared_difference(target_value,pred_value_1))\n critic_loss_2=tf.reduce_mean(tf.math.squared_difference(target_value,pred_value_2))\n\n return critic_loss_1,critic_loss_2\n \n def train(self):\n\n self.Train_actor=self.get_actor()\n self.Target_actor=self.get_actor()\n self.Target_actor.set_weights(self.Train_actor.get_weights())\n self.Train_critic_1=self.get_critic()\n self.Target_critic_1=self.get_critic()\n self.Target_critic_1.set_weights(self.Train_critic_1.get_weights())\n self.Train_critic_2=self.get_critic()\n self.Target_critic_2=self.get_critic()\n self.Target_critic_2.set_weights(self.Train_critic_2.get_weights())\n\n self.initialize_buffer()\n \n\n curr_state=self.env.reset()\n\n overall_Reward=0\n episode_reward=0\n no_of_comp=0\n\n for i in range(self.steps_to_train):\n \n if i<self.steps_to_stop_exp:\n action=self.get_action(self.Train_actor,curr_state.reshape(1,-1),sigma=0.1,noise=True)\n else:\n action=self.get_action(self.Train_actor,curr_state.reshape(1,-1))\n next_state,reward,done,_=self.env.step(action.numpy()[0])\n episode_reward+=reward\n\n self.memory.push(curr_state,action,reward,next_state,not done)\n\n if done:\n\n curr_state=self.env.reset()\n overall_Reward+=episode_reward\n if no_of_comp%20==0:\n print('On step {}, no. of complete episodes {} average episode reward {}'.format(i,no_of_comp,overall_Reward/20))\n overall_Reward=0\n episode_reward=0 ### Updating the reward to 0\n no_of_comp+=1\n else:\n curr_state=next_state\n\n states, actions, rewards, next_states, not_dones = self.memory.sample(self.batch_size)\n\n with tf.GradientTape() as t1, tf.GradientTape() as t2:\n critic_loss_1,critic_loss_2=self.loss_critics(states, actions, rewards, next_states, not_dones)\n\n grad_crit_1=t1.gradient(critic_loss_1,self.Train_critic_1.trainable_variables)\n grad_crit_2=t2.gradient(critic_loss_2,self.Train_critic_2.trainable_variables)\n\n self.cr_1_opt.apply_gradients(zip(grad_crit_1,self.Train_critic_1.trainable_variables))\n self.cr_2_opt.apply_gradients(zip(grad_crit_2,self.Train_critic_2.trainable_variables))\n\n if i % self.update_actor_step==0:\n\n with tf.GradientTape() as t:\n new_actions=self.Train_actor(states)\n act_loss=-1*tf.reduce_mean(self.Train_critic_1([states,new_actions]))\n\n grad_act=t.gradient(act_loss,self.Train_actor.trainable_variables)\n self.ac_opt.apply_gradients(zip(grad_act,self.Train_actor.trainable_variables))\n\n self.update_networks(self.Target_actor,self.Train_actor,self.tau)\n self.update_networks(self.Target_critic_1,self.Train_critic_1,self.tau)\n self.update_networks(self.Target_critic_2,self.Train_critic_2,self.tau)\n\n",
"_____no_output_____"
],
[
"env = gym.make('RoboschoolInvertedPendulum-v1')",
"_____no_output_____"
],
[
"from memory_module import replayBuffer\nmemory=replayBuffer(100000)",
"_____no_output_____"
],
[
"agent=TD3PG(env,memory)",
"_____no_output_____"
],
[
"agent.train()",
"_____no_output_____"
],
[
"",
"_____no_output_____"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a647328c64f1098359987a9bf6259b77ebcc609
| 32,933 |
ipynb
|
Jupyter Notebook
|
notebooks/5-2.mnist_digit_pixel_by_pixel.ipynb
|
rickiepark/dl-illustrated
|
c1c7bda9064d0560710ca125c35453b5a51f4108
|
[
"MIT"
] | 10 |
2021-01-30T22:40:45.000Z
|
2022-03-01T03:32:07.000Z
|
notebooks/5-2.mnist_digit_pixel_by_pixel.ipynb
|
higuseonhye/dl-illustrated
|
c1c7bda9064d0560710ca125c35453b5a51f4108
|
[
"MIT"
] | null | null | null |
notebooks/5-2.mnist_digit_pixel_by_pixel.ipynb
|
higuseonhye/dl-illustrated
|
c1c7bda9064d0560710ca125c35453b5a51f4108
|
[
"MIT"
] | 10 |
2021-01-21T16:06:15.000Z
|
2022-03-03T07:16:39.000Z
| 137.220833 | 26,370 | 0.867458 |
[
[
[
"# MNIST 숫자의 픽셀 이미지",
"_____no_output_____"
],
[
"MNIST 숫자를 적재하고 자세한 픽셀 수준의 그림을 그립니다. ",
"_____no_output_____"
],
[
"[](https://colab.research.google.com/github/rickiepark/dl-illustrated/blob/master/notebooks/5-2.mnist_digit_pixel_by_pixel.ipynb)",
"_____no_output_____"
],
[
"#### 라이브러리 적재",
"_____no_output_____"
]
],
[
[
"from matplotlib import pyplot as plt\nimport numpy as np\nfrom tensorflow.keras.datasets import mnist",
"_____no_output_____"
]
],
[
[
"#### 데이터 적재",
"_____no_output_____"
]
],
[
[
"(X_train, y_train), (X_test, y_valid) = mnist.load_data()",
"Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/mnist.npz\n11493376/11490434 [==============================] - 0s 0us/step\n11501568/11490434 [==============================] - 0s 0us/step\n"
]
],
[
[
"#### 이미지 샘플링",
"_____no_output_____"
]
],
[
[
"# sample = np.random.randint(0, X_train.shape[0])\nsample = 39235",
"_____no_output_____"
]
],
[
[
"#### 숫자 출력",
"_____no_output_____"
]
],
[
[
"plt.figure(figsize = (10,10))\nmnist_img = X_train[sample]\nplt.imshow(mnist_img,cmap=\"Greys\")\nax = plt.gca()\n\n# First turn off the major labels, but not the major ticks\nplt.tick_params(\n axis='both', # changes apply to the both x and y axes\n which='major', # Change the major ticks only\n bottom=True, # ticks along the bottom edge are on\n left=True, # ticks along the top edge are on\n labelbottom=False, # labels along the bottom edge are off\n labelleft=False) # labels along the left edge are off\n\n# Next turn off the minor ticks, but not the minor labels\nplt.tick_params(\n axis='both', # changes apply to both x and y axes\n which='minor', # Change the minor ticks only\n bottom=False, # ticks along the bottom edge are off\n left=False, # ticks along the left edge are off\n labelbottom=True, # labels along the bottom edge are on\n labelleft=True) # labels along the left edge are on\n\n# Set the major ticks, starting at 1 (the -0.5 tick gets hidden off the canvas)\nax.set_xticks(np.arange(-.5, 28, 1))\nax.set_yticks(np.arange(-.5, 28, 1))\n\n# Set the minor ticks and labels\nax.set_xticks(np.arange(0, 28, 1), minor=True);\nax.set_xticklabels([str(i) for i in np.arange(0, 28, 1)], minor=True);\nax.set_yticks(np.arange(0, 28, 1), minor=True);\nax.set_yticklabels([str(i) for i in np.arange(0, 28, 1)], minor=True);\n\nax.grid(color='black', linestyle='-', linewidth=1.5)\n_ = plt.colorbar(fraction=0.046, pad=0.04, ticks=[0,32,64,96,128,160,192,224,255])",
"_____no_output_____"
]
],
[
[
"#### 이미지 레이블 확인",
"_____no_output_____"
]
],
[
[
"y_train[sample]",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a6476c750fc67f36cc20fde25a4abccce2a111e
| 8,752 |
ipynb
|
Jupyter Notebook
|
notebooks/02 pyconuk_exploratory_analysis.ipynb
|
bibs440v/NLP
|
6c0d79a5e699791df0887ca30a1f4fa6f48c4226
|
[
"MIT"
] | 314 |
2016-09-08T03:37:20.000Z
|
2022-02-17T11:49:29.000Z
|
notebooks/02 pyconuk_exploratory_analysis.ipynb
|
MauDavalos/nlp-tutorial
|
6c0d79a5e699791df0887ca30a1f4fa6f48c4226
|
[
"MIT"
] | 6 |
2020-11-18T21:58:49.000Z
|
2022-03-11T23:52:36.000Z
|
notebooks/02 pyconuk_exploratory_analysis.ipynb
|
bibs440v/NLP
|
6c0d79a5e699791df0887ca30a1f4fa6f48c4226
|
[
"MIT"
] | 164 |
2016-09-18T23:48:26.000Z
|
2021-02-10T16:42:09.000Z
| 25.516035 | 133 | 0.532335 |
[
[
[
"# Exploring Text Data (2)\n\n## PyConUK talk abstract\n\nData set of abstracts for the PyConUK 2016 talks (retrieved 14th Sept 2016 from https://github.com/PyconUK/2016.pyconuk.org)\n\nThe data can be found in `../data/pyconuk2016/{keynotes,workshops,talks}/*`\n\nThere are 101 abstracts",
"_____no_output_____"
],
[
"## Load the data\n\nFirstly, we load all the data into the `documents` dictionary\n\nWe also merge the documents into one big string, `corpus_all_in_one`, for convenience",
"_____no_output_____"
]
],
[
[
"import os\n\ndata_dir = os.path.join('..', 'data', 'pyconuk2016')\n\ntalk_types = ['keynotes', 'workshops', 'talks']\n\nall_talk_files = [os.path.join(data_dir, talk_type, fname)\n for talk_type in talk_types\n for fname in os.listdir(os.path.join(data_dir, talk_type))]\n\ndocuments = {}\nfor talk_fname in all_talk_files:\n bname = os.path.basename(talk_fname)\n talk_title = os.path.splitext(bname)[0]\n with open(talk_fname, 'r') as f:\n content = f.read()\n documents[talk_title] = content\n \ncorpus_all_in_one = ' '.join([doc for doc in documents.values()])\n \nprint(\"Number of talks: {}\".format(len(all_talk_files)))\nprint(\"Corpus size (char): {}\".format(len(corpus_all_in_one)))",
"_____no_output_____"
],
[
"%matplotlib inline\nimport matplotlib.pyplot as plt\nfrom wordcloud import WordCloud\n\ncloud = WordCloud(max_words=100)\ncloud.generate_from_text(corpus_all_in_one)\n\nplt.figure(figsize=(12,8))\nplt.imshow(cloud)\nplt.axis('off')\nplt.show()",
"_____no_output_____"
],
[
"all_talk_files[0]",
"_____no_output_____"
],
[
"%cat {all_talk_files[0]}\n\n# For a list of magics type:\n# %lsmagic",
"_____no_output_____"
],
[
"documents = {}\nfor talk_fname in all_talk_files:\n bname = os.path.basename(talk_fname)\n talk_title = os.path.splitext(bname)[0]\n with open(talk_fname, 'r') as f:\n content = \"\"\n for line in f:\n if line.startswith('title:'):\n line = line[6:]\n if line.startswith('subtitle:') \\\n or line.startswith('speaker:') \\\n or line.startswith('---'):\n continue\n content += line\n documents[talk_title] = content\n\ncorpus_all_in_one = ' '.join([doc for doc in documents.values()])",
"_____no_output_____"
],
[
"%matplotlib inline\nimport matplotlib.pyplot as plt\nfrom wordcloud import WordCloud\n\ncloud = WordCloud(max_words=100)\ncloud.generate_from_text(corpus_all_in_one)\n\nplt.figure(figsize=(12,8))\nplt.imshow(cloud)\nplt.axis('off')\nplt.show()",
"_____no_output_____"
],
[
"from collections import Counter\nimport string\nfrom nltk.tokenize import word_tokenize\nfrom nltk.corpus import stopwords\n\nstop_list = stopwords.words('english') + list(string.punctuation)\n\ndocument_frequency = Counter()\n\nfor talk_id, content in documents.items():\n try: # py3\n tokens = word_tokenize(content)\n except UnicodeDecodeError: # py27\n tokens = word_tokenize(content.decode('utf-8'))\n unique_tokens = [token.lower() for token in set(tokens)\n if token.lower() not in stop_list]\n document_frequency.update(unique_tokens)\n\nfor word, freq in document_frequency.most_common(20):\n print(\"{}\\t{}\".format(word, freq))",
"_____no_output_____"
],
[
"# print(stop_list)\nfor item in ['will', \"'ll\", 'll']:\n print(\"{} in stop_list == {}\".format(item, item in stop_list))",
"_____no_output_____"
],
[
"from nltk import ngrams\n\ntry:\n all_tokens = [t for t in word_tokenize(corpus_all_in_one)]\nexcept UnicodeDecodeError:\n all_tokens = [t for t in word_tokenize(corpus_all_in_one.decode('utf-8'))]\n\nbigrams = ngrams(all_tokens, 2)\ntrigrams = ngrams(all_tokens, 3)\n\nbi_count = Counter(bigrams)\ntri_count = Counter(trigrams)\n\nfor phrase, freq in bi_count.most_common(20):\n print(\"{}\\t{}\".format(phrase, freq))\n \nfor phrase, freq in tri_count.most_common(20):\n print(\"{}\\t{}\".format(phrase, freq))",
"_____no_output_____"
]
],
[
[
"## Term Frequency (TF)\n\nTF provides a weight of a term within a document, based on the term frequency\n\nTF(term, doc) = count(term in doc)\n\nTF(term, doc) = count(term in doc) / len(doc)\n\n\n## Inverse Document Frequency (IDF)\n\nIDF provides a weight of a term across the collection, based on the document frequency of such term\n\nIDF(term) = log( N / DF(term) )\n\nIDF(term) = log( 1 + N / DF(term) )",
"_____no_output_____"
],
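To make the two TF variants and the two IDF variants above concrete, here is a small self-contained sketch in plain Python; the toy documents and the helper names (`tf_raw`, `tf_norm`, `idf`, `idf_smooth`) are illustrative and not part of the original notebook.

```python
# Sketch: the TF and IDF definitions above, applied to a toy tokenized corpus.
import math
from collections import Counter

toy_corpus = [["python", "is", "fun"],
              ["python", "python", "rocks"],
              ["data", "is", "fun"]]
N = len(toy_corpus)

def tf_raw(term, doc):
    return doc.count(term)             # TF(term, doc) = count(term in doc)

def tf_norm(term, doc):
    return doc.count(term) / len(doc)  # TF normalised by document length

df = Counter()                         # DF(term) = number of documents containing term
for doc in toy_corpus:
    df.update(set(doc))

def idf(term):
    return math.log(N / df[term])      # IDF(term) = log(N / DF(term))

def idf_smooth(term):
    return math.log(1 + N / df[term])  # smoothed IDF variant

print(tf_raw("python", toy_corpus[1]), tf_norm("python", toy_corpus[1]))
print(idf("python"), idf_smooth("python"))
```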
[
"## Introducing sklearn\n\nSo far, we have used some homemade implementation to count words\n\nWhat if we need something more involved?\n\nsklearn (http://scikit-learn.org/) is one of the main libraries for Machine Learning in Python\n\nWith an easy-to-use interface, it provides support for a variety of Machine Learning models\n\nWe're going to use it to tackle a Text Classification problem\n",
"_____no_output_____"
]
],
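The cell above only announces that sklearn will be used for a text-classification problem. As a taste of what that usually looks like, here is a minimal sketch of a typical bag-of-words pipeline; the 20 newsgroups data and the two categories are stand-in examples, not the dataset used later in this tutorial.

```python
# Sketch: a typical sklearn text-classification pipeline (TF-IDF features + Naive Bayes).
from sklearn.datasets import fetch_20newsgroups
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn.pipeline import make_pipeline
from sklearn.metrics import accuracy_score

categories = ['sci.space', 'rec.autos']   # stand-in categories
train = fetch_20newsgroups(subset='train', categories=categories)
test = fetch_20newsgroups(subset='test', categories=categories)

model = make_pipeline(TfidfVectorizer(stop_words='english'), MultinomialNB())
model.fit(train.data, train.target)

predicted = model.predict(test.data)
print("accuracy:", accuracy_score(test.target, predicted))
```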
[
[
"from random import randint\n\nwinner = randint(1, 36)\n\nprint(\"And the winner is ... {}\".format(winner))",
"_____no_output_____"
],
[
"from nltk import pos_tag\nfrom nltk.tokenize import word_tokenize\n\ns = \"The quick brown fox juped over the dog\"\ntokens = word_tokenize(s)\ntokens\n",
"_____no_output_____"
],
[
"pos_tag(tokens)",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
]
] |
4a647e3977056b84b55dbf564803c8ffc0a854e1
| 246,994 |
ipynb
|
Jupyter Notebook
|
labs/03_neural_recsys/Explicit_Feedback_Neural_Recommender_System_rendered.ipynb
|
mm-nasr/lectures-labs
|
4f505b83bcb5c6c2b2d7945516ae87dc338e6938
|
[
"MIT"
] | 1 |
2021-07-29T18:32:31.000Z
|
2021-07-29T18:32:31.000Z
|
labs/03_neural_recsys/Explicit_Feedback_Neural_Recommender_System_rendered.ipynb
|
mm-nasr/lectures-labs
|
4f505b83bcb5c6c2b2d7945516ae87dc338e6938
|
[
"MIT"
] | null | null | null |
labs/03_neural_recsys/Explicit_Feedback_Neural_Recommender_System_rendered.ipynb
|
mm-nasr/lectures-labs
|
4f505b83bcb5c6c2b2d7945516ae87dc338e6938
|
[
"MIT"
] | null | null | null | 113.040732 | 63,264 | 0.820162 |
[
[
[
"# Explicit Feedback Neural Recommender Systems\n\nGoals:\n- Understand recommender data\n- Build different models architectures using Keras\n- Retrieve Embeddings and visualize them\n- Add metadata information as input to the model",
"_____no_output_____"
]
],
[
[
"%matplotlib inline\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport os.path as op\n\nfrom zipfile import ZipFile\ntry:\n from urllib.request import urlretrieve\nexcept ImportError: # Python 2 compat\n from urllib import urlretrieve\n\n\nML_100K_URL = \"http://files.grouplens.org/datasets/movielens/ml-100k.zip\"\nML_100K_FILENAME = ML_100K_URL.rsplit('/', 1)[1]\nML_100K_FOLDER = 'ml-100k'\n\nif not op.exists(ML_100K_FILENAME):\n print('Downloading %s to %s...' % (ML_100K_URL, ML_100K_FILENAME))\n urlretrieve(ML_100K_URL, ML_100K_FILENAME)\n\nif not op.exists(ML_100K_FOLDER):\n print('Extracting %s to %s...' % (ML_100K_FILENAME, ML_100K_FOLDER))\n ZipFile(ML_100K_FILENAME).extractall('.')",
"_____no_output_____"
]
],
[
[
"### Ratings file\n\nEach line contains a rated movie: \n- a user\n- an item\n- a rating from 1 to 5 stars",
"_____no_output_____"
]
],
[
[
"import pandas as pd\n\nraw_ratings = pd.read_csv(op.join(ML_100K_FOLDER, 'u.data'), sep='\\t',\n names=[\"user_id\", \"item_id\", \"rating\", \"timestamp\"])\nraw_ratings.head()",
"_____no_output_____"
]
],
[
[
"### Item metadata file\n\nThe item metadata file contains metadata like the name of the movie or the date it was released. The movies file contains columns indicating the movie's genres. Let's only load the first five columns of the file with `usecols`.",
"_____no_output_____"
]
],
[
[
"m_cols = ['item_id', 'title', 'release_date', 'video_release_date', 'imdb_url']\nitems = pd.read_csv(op.join(ML_100K_FOLDER, 'u.item'), sep='|',\n names=m_cols, usecols=range(5), encoding='latin-1')\nitems.head()",
"_____no_output_____"
]
],
[
[
"Let's write a bit of Python preprocessing code to extract the release year as an integer value:",
"_____no_output_____"
]
],
[
[
"def extract_year(release_date):\n if hasattr(release_date, 'split'):\n components = release_date.split('-')\n if len(components) == 3:\n return int(components[2])\n # Missing value marker\n return 1920\n\n\nitems['release_year'] = items['release_date'].map(extract_year)\nitems.hist('release_year', bins=50);",
"_____no_output_____"
]
],
[
[
"Enrich the raw ratings data with the collected items metadata:",
"_____no_output_____"
]
],
[
[
"all_ratings = pd.merge(items, raw_ratings)",
"_____no_output_____"
],
[
"all_ratings.head()",
"_____no_output_____"
]
],
[
[
"### Data preprocessing\n\nTo understand well the distribution of the data, the following statistics are computed:\n- the number of users\n- the number of items\n- the rating distribution\n- the popularity of each movie",
"_____no_output_____"
]
],
[
[
"max_user_id = all_ratings['user_id'].max()\nmax_user_id",
"_____no_output_____"
],
[
"max_item_id = all_ratings['item_id'].max()\nmax_item_id",
"_____no_output_____"
],
[
"all_ratings['rating'].describe()",
"_____no_output_____"
]
],
[
[
"Let's do a bit more pandas magic compute the popularity of each movie (number of ratings):",
"_____no_output_____"
]
],
[
[
"popularity = all_ratings.groupby('item_id').size().reset_index(name='popularity')\nitems = pd.merge(popularity, items)\nitems.nlargest(10, 'popularity')",
"_____no_output_____"
]
],
[
[
"Enrich the ratings data with the popularity as an additional metadata.",
"_____no_output_____"
]
],
[
[
"all_ratings = pd.merge(popularity, all_ratings)\nall_ratings.head()",
"_____no_output_____"
]
],
[
[
"Later in the analysis we will assume that this popularity does not come from the ratings themselves but from an external metadata, e.g. box office numbers in the month after the release in movie theaters.\n\nLet's split the enriched data in a train / test split to make it possible to do predictive modeling:",
"_____no_output_____"
]
],
[
[
"from sklearn.model_selection import train_test_split\n\nratings_train, ratings_test = train_test_split(\n all_ratings, test_size=0.2, random_state=0)\n\nuser_id_train = np.array(ratings_train['user_id'])\nitem_id_train = np.array(ratings_train['item_id'])\nrating_train = np.array(ratings_train['rating'])\n\nuser_id_test = np.array(ratings_test['user_id'])\nitem_id_test = np.array(ratings_test['item_id'])\nrating_test = np.array(ratings_test['rating'])",
"_____no_output_____"
]
],
[
[
"# Explicit feedback: supervised ratings prediction\n\nFor each pair of (user, item) try to predict the rating the user would give to the item.\n\nThis is the classical setup for building recommender systems from offline data with explicit supervision signal. ",
"_____no_output_____"
],
[
"## Predictive ratings as a regression problem\n\nThe following code implements the following architecture:\n\n<img src=\"images/rec_archi_1.svg\" style=\"width: 600px;\" />",
"_____no_output_____"
]
],
[
[
"from tensorflow.keras.layers import Embedding, Flatten, Dense, Dropout\nfrom tensorflow.keras.layers import Dot\nfrom tensorflow.keras.models import Model",
"_____no_output_____"
],
[
"# For each sample we input the integer identifiers\n# of a single user and a single item\nclass RegressionModel(Model):\n def __init__(self, embedding_size, max_user_id, max_item_id):\n super().__init__()\n \n self.user_embedding = Embedding(output_dim=embedding_size, input_dim=max_user_id + 1,\n input_length=1, name='user_embedding')\n self.item_embedding = Embedding(output_dim=embedding_size, input_dim=max_item_id + 1,\n input_length=1, name='item_embedding')\n \n # The following two layers don't have parameters.\n self.flatten = Flatten()\n self.dot = Dot(axes=1)\n \n def call(self, inputs):\n user_inputs = inputs[0]\n item_inputs = inputs[1]\n \n user_vecs = self.flatten(self.user_embedding(user_inputs))\n item_vecs = self.flatten(self.item_embedding(item_inputs))\n \n y = self.dot([user_vecs, item_vecs])\n return y\n \nmodel = RegressionModel(30, max_user_id, max_item_id)\nmodel.compile(optimizer='adam', loss='mae')",
"_____no_output_____"
],
[
"# Useful for debugging the output shape of model\ninitial_train_preds = model.predict([user_id_train, item_id_train])\ninitial_train_preds.shape",
"_____no_output_____"
]
],
[
[
"### Model error\n\nUsing `initial_train_preds`, compute the model errors:\n- mean absolute error\n- mean squared error\n\nConverting a pandas Series to numpy array is usually implicit, but you may use `rating_train.values` to do so explicitly. Be sure to monitor the shapes of each object you deal with by using `object.shape`.",
"_____no_output_____"
]
],
[
[
"# %load solutions/compute_errors.py\nsquared_differences = np.square(initial_train_preds[:,0] - rating_train)\nabsolute_differences = np.abs(initial_train_preds[:,0] - rating_train)\n\nprint(\"Random init MSE: %0.3f\" % np.mean(squared_differences))\nprint(\"Random init MAE: %0.3f\" % np.mean(absolute_differences))\n\n# You may also use sklearn metrics to do so using scikit-learn:\n\nfrom sklearn.metrics import mean_squared_error, mean_absolute_error\n\nprint(\"Random init MSE: %0.3f\" % mean_squared_error(initial_train_preds, rating_train))\nprint(\"Random init MAE: %0.3f\" % mean_absolute_error(initial_train_preds, rating_train))\n",
"Random init MSE: 13.720\nRandom init MAE: 3.529\nRandom init MSE: 13.720\nRandom init MAE: 3.529\n"
]
],
[
[
"### Monitoring runs\n\nKeras enables to monitor various variables during training. \n\n`history.history` returned by the `model.fit` function is a dictionary\ncontaining the `'loss'` and validation loss `'val_loss'` after each epoch",
"_____no_output_____"
]
],
[
[
"%%time\n\n# Training the model\nhistory = model.fit([user_id_train, item_id_train], rating_train,\n batch_size=64, epochs=6, validation_split=0.1,\n shuffle=True)",
"Train on 72000 samples, validate on 8000 samples\nEpoch 1/6\n72000/72000 [==============================] - 3s 41us/sample - loss: 2.9085 - val_loss: 1.3112\nEpoch 2/6\n72000/72000 [==============================] - 2s 29us/sample - loss: 0.9590 - val_loss: 0.8337\nEpoch 3/6\n72000/72000 [==============================] - 2s 30us/sample - loss: 0.7814 - val_loss: 0.7825\nEpoch 4/6\n72000/72000 [==============================] - 2s 31us/sample - loss: 0.7514 - val_loss: 0.7677\nEpoch 5/6\n72000/72000 [==============================] - 2s 30us/sample - loss: 0.7399 - val_loss: 0.7619\nEpoch 6/6\n72000/72000 [==============================] - 2s 29us/sample - loss: 0.7332 - val_loss: 0.7569\nCPU times: user 19.8 s, sys: 2.23 s, total: 22 s\nWall time: 13.9 s\n"
],
[
"plt.plot(history.history['loss'], label='train')\nplt.plot(history.history['val_loss'], label='validation')\nplt.ylim(0, 2)\nplt.legend(loc='best')\nplt.title('Loss');",
"_____no_output_____"
]
],
[
[
"**Questions**:\n\n- Why is the train loss higher than the first loss in the first few epochs?\n- Why is Keras not computing the train loss on the full training set at the end of each epoch as it does on the validation set?\n\n\nNow that the model is trained, the model MSE and MAE look nicer:",
"_____no_output_____"
]
],
[
[
"from sklearn.metrics import mean_squared_error\nfrom sklearn.metrics import mean_absolute_error\n\ntest_preds = model.predict([user_id_test, item_id_test])\nprint(\"Final test MSE: %0.3f\" % mean_squared_error(test_preds, rating_test))\nprint(\"Final test MAE: %0.3f\" % mean_absolute_error(test_preds, rating_test))",
"Final test MSE: 0.955\nFinal test MAE: 0.752\n"
],
[
"train_preds = model.predict([user_id_train, item_id_train])\nprint(\"Final train MSE: %0.3f\" % mean_squared_error(train_preds, rating_train))\nprint(\"Final train MAE: %0.3f\" % mean_absolute_error(train_preds, rating_train))",
"Final train MSE: 0.879\nFinal train MAE: 0.716\n"
]
],
[
[
"## A Deep recommender model\n\nUsing a similar framework as previously, the following deep model described in the course was built (with only two fully connected)\n\n<img src=\"images/rec_archi_2.svg\" style=\"width: 600px;\" />\n\nTo build this model we will need a new kind of layer:",
"_____no_output_____"
]
],
[
[
"from tensorflow.keras.layers import Concatenate",
"_____no_output_____"
]
],
[
[
"\n### Exercise\n\n- The following code has **4 errors** that prevent it from working correctly. **Correct them and explain** why they are critical.",
"_____no_output_____"
]
],
[
[
"# For each sample we input the integer identifiers\n# of a single user and a single item\nclass DeepRegressionModel(Model):\n def __init__(self, embedding_size, max_user_id, max_item_id):\n super().__init__()\n \n self.user_embedding = Embedding(output_dim=embedding_size, input_dim=max_user_id + 1,\n input_length=1, name='user_embedding')\n self.item_embedding = Embedding(output_dim=embedding_size, input_dim=max_item_id + 1,\n input_length=1, name='item_embedding')\n \n # The following two layers don't have parameters.\n self.flatten = Flatten()\n self.concat = Concatenate()\n \n self.dropout = Dropout(0.99)\n self.dense1 = Dense(64, activation=\"relu\")\n self.dense2 = Dense(2, activation=\"tanh\")\n \n def call(self, inputs):\n user_inputs = inputs[0]\n item_inputs = inputs[1]\n \n user_vecs = self.flatten(self.user_embedding(user_inputs))\n item_vecs = self.flatten(self.item_embedding(item_inputs))\n \n input_vecs = self.concat([user_vecs, item_vecs])\n \n y = self.dropout(input_vecs)\n y = self.dense1(y)\n y = self.dense2(y)\n \n return y\n \nmodel = DeepRegressionModel(30, max_user_id, max_item_id)\nmodel.compile(optimizer='adam', loss='binary_crossentropy')\n\ninitial_train_preds = model.predict([user_id_train, item_id_train])",
"WARNING:tensorflow:Large dropout rate: 0.99 (>0.5). In TensorFlow 2.x, dropout() uses dropout rate instead of keep_prob. Please ensure that this is intended.\n"
],
[
"# %load solutions/deep_explicit_feedback_recsys.py\n# For each sample we input the integer identifiers\n# of a single user and a single item\nclass DeepRegressionModel(Model):\n def __init__(self, embedding_size, max_user_id, max_item_id):\n super().__init__()\n\n self.user_embedding = Embedding(output_dim=embedding_size, input_dim=max_user_id + 1,\n input_length=1, name='user_embedding')\n self.item_embedding = Embedding(output_dim=embedding_size, input_dim=max_item_id + 1,\n input_length=1, name='item_embedding')\n\n # The following two layers don't have parameters.\n self.flatten = Flatten()\n self.concat = Concatenate()\n\n ## Error 1: Dropout was too high, preventing any training\n self.dropout = Dropout(0.5)\n self.dense1 = Dense(64, activation=\"relu\")\n ## Error 2: output dimension was 2 where we predict only 1-d rating\n ## Error 3: tanh activation squashes the outputs between -1 and 1\n ## when we want to predict values between 1 and 5\n self.dense2 = Dense(1)\n\n def call(self, inputs):\n user_inputs = inputs[0]\n item_inputs = inputs[1]\n\n user_vecs = self.flatten(self.user_embedding(user_inputs))\n item_vecs = self.flatten(self.item_embedding(item_inputs))\n\n input_vecs = self.concat([user_vecs, item_vecs])\n\n y = self.dropout(input_vecs)\n y = self.dense1(y)\n y = self.dense2(y)\n\n return y\n\nmodel = DeepRegressionModel(30, max_user_id, max_item_id)\n## Error 4: A binary crossentropy loss is only useful for binary\n## classification, while we are in regression (use mse or mae)\nmodel.compile(optimizer='adam', loss='mae')\n\ninitial_train_preds = model.predict([user_id_train, item_id_train])",
"_____no_output_____"
],
[
"%%time\nhistory = model.fit([user_id_train, item_id_train], rating_train,\n batch_size=64, epochs=5, validation_split=0.1,\n shuffle=True)",
"Train on 72000 samples, validate on 8000 samples\nEpoch 1/5\n72000/72000 [==============================] - 3s 48us/sample - loss: 0.9952 - val_loss: 0.7558\nEpoch 2/5\n72000/72000 [==============================] - 3s 42us/sample - loss: 0.7535 - val_loss: 0.7429\nEpoch 3/5\n72000/72000 [==============================] - 3s 43us/sample - loss: 0.7404 - val_loss: 0.7409\nEpoch 4/5\n72000/72000 [==============================] - 3s 39us/sample - loss: 0.7341 - val_loss: 0.7353\nEpoch 5/5\n72000/72000 [==============================] - 2s 34us/sample - loss: 0.7280 - val_loss: 0.7336\nCPU times: user 21.3 s, sys: 2.29 s, total: 23.6 s\nWall time: 14.9 s\n"
],
[
"plt.plot(history.history['loss'], label='train')\nplt.plot(history.history['val_loss'], label='validation')\nplt.ylim(0, 2)\nplt.legend(loc='best')\nplt.title('Loss');",
"_____no_output_____"
],
[
"train_preds = model.predict([user_id_train, item_id_train])\nprint(\"Final train MSE: %0.3f\" % mean_squared_error(train_preds, rating_train))\nprint(\"Final train MAE: %0.3f\" % mean_absolute_error(train_preds, rating_train))",
"Final train MSE: 0.870\nFinal train MAE: 0.701\n"
],
[
"test_preds = model.predict([user_id_test, item_id_test])\nprint(\"Final test MSE: %0.3f\" % mean_squared_error(test_preds, rating_test))\nprint(\"Final test MAE: %0.3f\" % mean_absolute_error(test_preds, rating_test))",
"Final test MSE: 0.919\nFinal test MAE: 0.727\n"
]
],
[
[
"### Home assignment: \n - Add another layer, compare train/test error\n - What do you notice? \n - Try adding more dropout and modifying layer sizes: should you increase\n or decrease the number of parameters",
"_____no_output_____"
],
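For the home assignment, one possible starting point is sketched below. The class name `DeeperRegressionModel`, the layer sizes, and the dropout rate are arbitrary choices made up for this sketch (it reuses the layers and variables already defined above); the idea is to compare its train/test MAE against the two models trained earlier.

```python
# Sketch: a deeper variant with one extra hidden layer and more dropout.
class DeeperRegressionModel(Model):
    def __init__(self, embedding_size, max_user_id, max_item_id):
        super().__init__()
        self.user_embedding = Embedding(output_dim=embedding_size, input_dim=max_user_id + 1,
                                        input_length=1, name='user_embedding')
        self.item_embedding = Embedding(output_dim=embedding_size, input_dim=max_item_id + 1,
                                        input_length=1, name='item_embedding')
        self.flatten = Flatten()
        self.concat = Concatenate()
        self.dropout1 = Dropout(0.3)
        self.dense1 = Dense(128, activation="relu")
        self.dropout2 = Dropout(0.3)
        self.dense2 = Dense(64, activation="relu")
        self.dense3 = Dense(1)

    def call(self, inputs):
        user_vecs = self.flatten(self.user_embedding(inputs[0]))
        item_vecs = self.flatten(self.item_embedding(inputs[1]))
        y = self.concat([user_vecs, item_vecs])
        y = self.dropout1(y)
        y = self.dense1(y)
        y = self.dropout2(y)
        y = self.dense2(y)
        return self.dense3(y)

deeper_model = DeeperRegressionModel(30, max_user_id, max_item_id)
deeper_model.compile(optimizer='adam', loss='mae')
deeper_model.fit([user_id_train, item_id_train], rating_train,
                 batch_size=64, epochs=5, validation_split=0.1, shuffle=True)
```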
[
"### Model Embeddings\n\n- It is possible to retrieve the embeddings by simply using the Keras function `model.get_weights` which returns all the model learnable parameters.\n- The weights are returned the same order as they were build in the model\n- What is the total number of parameters?",
"_____no_output_____"
]
],
[
[
"# weights and shape\nweights = model.get_weights()\n[w.shape for w in weights]",
"_____no_output_____"
],
[
"# Solution: \nmodel.summary()",
"Model: \"deep_reco_model_1\"\n_________________________________________________________________\nLayer (type) Output Shape Param # \n=================================================================\nuser_embedding (Embedding) multiple 28320 \n_________________________________________________________________\nitem_embedding (Embedding) multiple 50490 \n_________________________________________________________________\nflatten_2 (Flatten) multiple 0 \n_________________________________________________________________\nconcatenate_1 (Concatenate) multiple 0 \n_________________________________________________________________\ndropout_1 (Dropout) multiple 0 \n_________________________________________________________________\ndense_2 (Dense) multiple 3904 \n_________________________________________________________________\ndense_3 (Dense) multiple 65 \n=================================================================\nTotal params: 82,779\nTrainable params: 82,779\nNon-trainable params: 0\n_________________________________________________________________\n"
],
[
"user_embeddings = weights[0]\nitem_embeddings = weights[1]\nprint(\"First item name from metadata:\", items[\"title\"][1])\nprint(\"Embedding vector for the first item:\")\nprint(item_embeddings[1])\nprint(\"shape:\", item_embeddings[1].shape)",
"First item name from metadata: GoldenEye (1995)\nEmbedding vector for the first item:\n[ 0.04381735 -0.03758094 -0.09222507 0.00290344 0.02897644 -0.09132228\n -0.09790767 -0.0174721 -0.05984796 0.02196434 -0.00122383 0.09068132\n -0.04926297 -0.02939705 0.04170604 0.00770544 0.11523039 0.05105631\n 0.08297069 0.12759422 0.06236807 -0.08692449 -0.0158185 -0.00936573\n 0.02809385 -0.04604392 -0.04167301 -0.1034516 0.10239426 -0.01661406]\nshape: (30,)\n"
]
],
[
[
"### Finding most similar items\nFinding k most similar items to a point in embedding space\n\n- Write in numpy a function to compute the cosine similarity between two points in embedding space\n- Write a function which computes the euclidean distance between a point in embedding space and all other points\n- Write a most similar function, which returns the k item names with lowest euclidean distance\n- Try with a movie index, such as 181 (Return of the Jedi). What do you observe? Don't expect miracles on such a small training set.\n\nNotes:\n- you may use `np.linalg.norm` to compute the norm of vector, and you may specify the `axis=`\n- the numpy function `np.argsort(...)` enables to compute the sorted indices of a vector\n- `items[\"name\"][idxs]` returns the names of the items indexed by array idxs",
"_____no_output_____"
]
],
[
[
"# %load solutions/similarity.py\nEPSILON = 1e-07\n\ndef cosine(x, y):\n dot_pdt = np.dot(x, y.T)\n norms = np.linalg.norm(x) * np.linalg.norm(y)\n return dot_pdt / (norms + EPSILON)\n\n# Computes cosine similarities between x and all item embeddings\ndef cosine_similarities(x):\n dot_pdts = np.dot(item_embeddings, x)\n norms = np.linalg.norm(x) * np.linalg.norm(item_embeddings, axis=1)\n return dot_pdts / (norms + EPSILON)\n\n# Computes euclidean distances between x and all item embeddings\ndef euclidean_distances(x):\n return np.linalg.norm(item_embeddings - x, axis=1)\n\n# Computes top_n most similar items to an idx, \ndef most_similar(idx, top_n=10, mode='euclidean'):\n sorted_indexes=0\n if mode == 'euclidean':\n dists = euclidean_distances(item_embeddings[idx])\n sorted_indexes = np.argsort(dists)\n idxs = sorted_indexes[0:top_n]\n return list(zip(items[\"title\"][idxs], dists[idxs]))\n else:\n sims = cosine_similarities(item_embeddings[idx])\n # [::-1] makes it possible to reverse the order of a numpy\n # array, this is required because most similar items have\n # a larger cosine similarity value\n sorted_indexes = np.argsort(sims)[::-1]\n idxs = sorted_indexes[0:top_n]\n return list(zip(items[\"title\"][idxs], sims[idxs]))\n\n# sanity checks:\nprint(\"cosine of item 1 and item 1: %0.3f\"\n % cosine(item_embeddings[1], item_embeddings[1]))\neuc_dists = euclidean_distances(item_embeddings[1])\nprint(euc_dists.shape)\nprint(euc_dists[1:5])\nprint()\n\n# Test on movie 181: Return of the Jedi\nprint(\"Items closest to 'Return of the Jedi':\")\nfor title, dist in most_similar(181, mode=\"euclidean\"):\n print(title, dist)\n\n\n# We observe that the embedding is poor at representing similarities\n# between movies, as most distance/similarities are very small/big \n# One may notice a few clusters though\n# it's interesting to plot the following distributions\n# plt.hist(euc_dists)\n\n# The reason for that is that the number of ratings is low and the embedding\n# does not automatically capture semantic relationships in that context. \n# Better representations arise with higher number of ratings, and less overfitting\n# in models or maybe better loss function, such as those based on implicit\n# feedback.\n",
"cosine of item 1 and item 1: 1.000\n(1683,)\n[0. 0.47529566 0.5354877 0.3661683 ]\n\nItems closest to 'Return of the Jedi':\nGoodFellas (1990) 0.0\nPrivate Benjamin (1980) 0.19624963\nGiant (1956) 0.20619841\nThis Is Spinal Tap (1984) 0.21406585\nSteel (1997) 0.2165023\nPeople vs. Larry Flynt, The (1996) 0.21659239\nStargate (1994) 0.21687394\nTurbo: A Power Rangers Movie (1997) 0.21891865\nTin Drum, The (Blechtrommel, Die) (1979) 0.22192156\nCat on a Hot Tin Roof (1958) 0.22819139\n"
]
],
[
[
"### Visualizing embeddings using TSNE\n\n- we use scikit learn to visualize items embeddings\n- Try different perplexities, and visualize user embeddings as well\n- What can you conclude ?",
"_____no_output_____"
]
],
[
[
"from sklearn.manifold import TSNE\n\nitem_tsne = TSNE(perplexity=30).fit_transform(item_embeddings)",
"_____no_output_____"
],
[
"import matplotlib.pyplot as plt\n\nplt.figure(figsize=(10, 10))\nplt.scatter(item_tsne[:, 0], item_tsne[:, 1]);\nplt.xticks(()); plt.yticks(());\nplt.show()",
"_____no_output_____"
]
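,
[
"# A sketch for the exercise above: different perplexities and the user\n# embeddings. This assumes a `user_embeddings` array was extracted from the\n# model in the same way as `item_embeddings` earlier in the notebook.\nfor perplexity in (5, 30, 50):\n    user_tsne = TSNE(perplexity=perplexity).fit_transform(user_embeddings)\n    plt.figure(figsize=(5, 5))\n    plt.scatter(user_tsne[:, 0], user_tsne[:, 1], s=5)\n    plt.title(\"user embeddings, perplexity=%d\" % perplexity)\n    plt.xticks(()); plt.yticks(());\n    plt.show()",
"_____no_output_____"
]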
],
[
[
"Alternatively with [Uniform Manifold Approximation and Projection](https://github.com/lmcinnes/umap):",
"_____no_output_____"
]
],
[
[
"!pip install umap-learn",
"Collecting umap-learn\n Downloading https://files.pythonhosted.org/packages/ad/92/36bac74962b424870026cb0b42cec3d5b6f4afa37d81818475d8762f9255/umap-learn-0.3.10.tar.gz (40kB)\n\u001b[K 100% |████████████████████████████████| 40kB 1.5MB/s ta 0:00:01\n\u001b[?25hRequirement already satisfied: numpy>=1.13 in /Users/arthurdouillard/code/lectures-labs/env/lib/python3.6/site-packages (from umap-learn)\nRequirement already satisfied: scikit-learn>=0.16 in /Users/arthurdouillard/code/lectures-labs/env/lib/python3.6/site-packages (from umap-learn)\nRequirement already satisfied: scipy>=0.19 in /Users/arthurdouillard/code/lectures-labs/env/lib/python3.6/site-packages (from umap-learn)\nCollecting numba>=0.37 (from umap-learn)\n Downloading https://files.pythonhosted.org/packages/42/ba/d111c67e175545053debe298a7510f01d3b345492dfae3a1397a36c27d56/numba-0.46.0-cp36-cp36m-macosx_10_9_x86_64.whl (2.0MB)\n\u001b[K 100% |████████████████████████████████| 2.0MB 719kB/s ta 0:00:011\n\u001b[?25hRequirement already satisfied: joblib>=0.11 in /Users/arthurdouillard/code/lectures-labs/env/lib/python3.6/site-packages (from scikit-learn>=0.16->umap-learn)\nCollecting llvmlite>=0.30.0dev0 (from numba>=0.37->umap-learn)\n Downloading https://files.pythonhosted.org/packages/b4/6e/9dfac2466a7c920acb85ef6a061d8453b851bb06e5c8ac6cc6cdaf2e37c4/llvmlite-0.30.0-cp36-cp36m-macosx_10_9_x86_64.whl (15.9MB)\n\u001b[K 100% |████████████████████████████████| 15.9MB 95kB/s eta 0:00:011 34% |███████████ | 5.5MB 5.3MB/s eta 0:00:02\n\u001b[?25hBuilding wheels for collected packages: umap-learn\n Running setup.py bdist_wheel for umap-learn ... \u001b[?25ldone\n\u001b[?25h Stored in directory: /Users/arthurdouillard/Library/Caches/pip/wheels/d0/f8/d5/8e3af3ee957feb9b403a060ebe72f7561887fef9dea658326e\nSuccessfully built umap-learn\nInstalling collected packages: llvmlite, numba, umap-learn\nSuccessfully installed llvmlite-0.30.0 numba-0.46.0 umap-learn-0.3.10\n\u001b[33mYou are using pip version 9.0.3, however version 19.3.1 is available.\nYou should consider upgrading via the 'pip install --upgrade pip' command.\u001b[0m\n"
],
[
"import umap\n\nitem_umap = umap.UMAP().fit_transform(item_embeddings)\nplt.figure(figsize=(10, 10))\nplt.scatter(item_umap[:, 0], item_umap[:, 1]);\nplt.xticks(()); plt.yticks(());\nplt.show()",
"_____no_output_____"
]
],
[
[
"## Using item metadata in the model\n\nUsing a similar framework as previously, we will build another deep model that can also leverage additional metadata. The resulting system is therefore an **Hybrid Recommender System** that does both **Collaborative Filtering** and **Content-based recommendations**.\n\n<img src=\"images/rec_archi_3.svg\" style=\"width: 600px;\" />\n",
"_____no_output_____"
]
],
[
[
"from sklearn.preprocessing import QuantileTransformer\n\nmeta_columns = ['popularity', 'release_year']\n\nscaler = QuantileTransformer()\nitem_meta_train = scaler.fit_transform(ratings_train[meta_columns])\nitem_meta_test = scaler.transform(ratings_test[meta_columns])",
"_____no_output_____"
],
[
"class HybridModel(Model):\n def __init__(self, embedding_size, max_user_id, max_item_id):\n super().__init__()\n \n self.user_embedding = Embedding(output_dim=embedding_size, input_dim=max_user_id + 1,\n input_length=1, name='user_embedding')\n self.item_embedding = Embedding(output_dim=embedding_size, input_dim=max_item_id + 1,\n input_length=1, name='item_embedding')\n \n # The following two layers don't have parameters.\n self.flatten = Flatten()\n self.concat = Concatenate()\n \n self.dense1 = Dense(64, activation=\"relu\")\n self.dropout = Dropout(0.5)\n self.dense2 = Dense(32, activation='relu')\n self.dense3 = Dense(2, activation=\"tanh\")\n \n def call(self, inputs):\n user_inputs = inputs[0]\n item_inputs = inputs[1]\n meta_inputs = inputs[2]\n \n user_vecs = self.flatten(self.user_embedding(user_inputs))\n item_vecs = self.flatten(self.item_embedding(item_inputs))\n \n input_vecs = self.concat([user_vecs, item_vecs, meta_inputs])\n \n y = self.dense1(input_vecs)\n y = self.dropout(y)\n y = self.dense2(y)\n y = self.dense3(y)\n \n return y\n \nmodel = DeepRecoModel(30, max_user_id, max_item_id)\nmodel.compile(optimizer='adam', loss='mae')\n\ninitial_train_preds = model.predict([user_id_train, item_id_train, item_meta_train])",
"_____no_output_____"
],
[
"%%time\nhistory = model.fit([user_id_train, item_id_train, item_meta_train], rating_train,\n batch_size=64, epochs=15, validation_split=0.1,\n shuffle=True)",
"Train on 72000 samples, validate on 8000 samples\nEpoch 1/15\n72000/72000 [==============================] - 3s 43us/sample - loss: 1.0460 - val_loss: 0.7590\nEpoch 2/15\n72000/72000 [==============================] - 2s 34us/sample - loss: 0.7566 - val_loss: 0.7452\nEpoch 3/15\n72000/72000 [==============================] - 2s 35us/sample - loss: 0.7398 - val_loss: 0.7394\nEpoch 4/15\n72000/72000 [==============================] - 2s 32us/sample - loss: 0.7332 - val_loss: 0.7383\nEpoch 5/15\n72000/72000 [==============================] - 2s 34us/sample - loss: 0.7277 - val_loss: 0.7334\nEpoch 6/15\n72000/72000 [==============================] - 2s 33us/sample - loss: 0.7202 - val_loss: 0.7274\nEpoch 7/15\n72000/72000 [==============================] - 3s 37us/sample - loss: 0.7135 - val_loss: 0.7234\nEpoch 8/15\n72000/72000 [==============================] - 3s 36us/sample - loss: 0.7101 - val_loss: 0.7221\nEpoch 9/15\n72000/72000 [==============================] - 2s 32us/sample - loss: 0.7059 - val_loss: 0.7187\nEpoch 10/15\n72000/72000 [==============================] - 2s 32us/sample - loss: 0.7004 - val_loss: 0.7212\nEpoch 11/15\n72000/72000 [==============================] - 2s 35us/sample - loss: 0.6977 - val_loss: 0.7229\nEpoch 12/15\n72000/72000 [==============================] - 2s 32us/sample - loss: 0.6947 - val_loss: 0.7160\nEpoch 13/15\n72000/72000 [==============================] - 2s 32us/sample - loss: 0.6908 - val_loss: 0.7206\nEpoch 14/15\n72000/72000 [==============================] - 2s 34us/sample - loss: 0.6906 - val_loss: 0.7202\nEpoch 15/15\n72000/72000 [==============================] - 2s 32us/sample - loss: 0.6903 - val_loss: 0.7218\nCPU times: user 56.1 s, sys: 5.83 s, total: 1min 1s\nWall time: 37.2 s\n"
],
[
"test_preds = model.predict([user_id_test, item_id_test, item_meta_test])\nprint(\"Final test MSE: %0.3f\" % mean_squared_error(test_preds, rating_test))\nprint(\"Final test MAE: %0.3f\" % mean_absolute_error(test_preds, rating_test))",
"Final test MSE: 0.904\nFinal test MAE: 0.714\n"
]
],
[
[
"The additional metadata seem to improve the predictive power of the model a bit at least in terms of MAE.\n\n\n### A recommendation function for a given user\n\nOnce the model is trained, the system can be used to recommend a few items for a user, that he/she hasn't already seen:\n- we use the `model.predict` to compute the ratings a user would have given to all items\n- we build a reco function that sorts these items and exclude those the user has already seen",
"_____no_output_____"
]
],
[
[
"indexed_items = items.set_index('item_id')\n\n\ndef recommend(user_id, top_n=10):\n item_ids = range(1, max_item_id)\n seen_mask = all_ratings[\"user_id\"] == user_id\n seen_movies = set(all_ratings[seen_mask][\"item_id\"])\n item_ids = list(filter(lambda x: x not in seen_movies, item_ids))\n\n print(\"User %d has seen %d movies, including:\" % (user_id, len(seen_movies)))\n for title in all_ratings[seen_mask].nlargest(20, 'popularity')['title']:\n print(\" \", title)\n print(\"Computing ratings for %d other movies:\" % len(item_ids))\n \n item_ids = np.array(item_ids)\n user_ids = np.zeros_like(item_ids)\n user_ids[:] = user_id\n items_meta = scaler.transform(indexed_items[meta_columns].loc[item_ids])\n \n rating_preds = model.predict([user_ids, item_ids, items_meta])\n \n item_ids = np.argsort(rating_preds[:, 0])[::-1].tolist()\n rec_items = item_ids[:top_n]\n return [(items[\"title\"][movie], rating_preds[movie][0])\n for movie in rec_items]",
"_____no_output_____"
],
[
"for title, pred_rating in recommend(5):\n print(\" %0.1f: %s\" % (pred_rating, title))",
"User 5 has seen 175 movies, including:\n Star Wars (1977)\n Fargo (1996)\n Return of the Jedi (1983)\n Toy Story (1995)\n Independence Day (ID4) (1996)\n Raiders of the Lost Ark (1981)\n Silence of the Lambs, The (1991)\n Empire Strikes Back, The (1980)\n Star Trek: First Contact (1996)\n Back to the Future (1985)\n Mission: Impossible (1996)\n Fugitive, The (1993)\n Indiana Jones and the Last Crusade (1989)\n Willy Wonka and the Chocolate Factory (1971)\n Princess Bride, The (1987)\n Forrest Gump (1994)\n Monty Python and the Holy Grail (1974)\n Men in Black (1997)\n E.T. the Extra-Terrestrial (1982)\n Birdcage, The (1996)\nComputing ratings for 1506 other movies:\n 4.4: Simple Wish, A (1997)\n 4.4: Robocop 3 (1993)\n 4.4: FairyTale: A True Story (1997)\n 4.4: Raising Arizona (1987)\n 4.3: Romy and Michele's High School Reunion (1997)\n 4.3: Madness of King George, The (1994)\n 4.3: Playing God (1997)\n 4.3: Richard III (1995)\n 4.2: Ref, The (1994)\n 4.2: Hoodlum (1997)\n"
]
],
[
[
"### Home assignment: Predicting ratings as a classification problem\n\nIn this dataset, the ratings all belong to a finite set of possible values:",
"_____no_output_____"
]
],
[
[
"import numpy as np\n\nnp.unique(rating_train)",
"_____no_output_____"
]
],
[
[
"Maybe we can help the model by forcing it to predict those values by treating the problem as a multiclassification problem. The only required changes are:\n\n- setting the final layer to output class membership probabities using a softmax activation with 5 outputs;\n- optimize the categorical cross-entropy classification loss instead of a regression loss such as MSE or MAE.",
"_____no_output_____"
]
],
[
[
"# %load solutions/classification.py\nclass ClassificationModel(Model):\n def __init__(self, embedding_size, max_user_id, max_item_id):\n super().__init__()\n\n self.user_embedding = Embedding(output_dim=embedding_size, input_dim=max_user_id + 1,\n input_length=1, name='user_embedding')\n self.item_embedding = Embedding(output_dim=embedding_size, input_dim=max_item_id + 1,\n input_length=1, name='item_embedding')\n\n # The following two layers don't have parameters.\n self.flatten = Flatten()\n self.concat = Concatenate()\n\n self.dropout1 = Dropout(0.5)\n self.dense1 = Dense(128, activation=\"relu\")\n self.dropout2 = Dropout(0.2)\n self.dense2 = Dense(128, activation='relu')\n self.dense3 = Dense(5, activation=\"softmax\")\n\n def call(self, inputs):\n user_inputs = inputs[0]\n item_inputs = inputs[1]\n\n user_vecs = self.flatten(self.user_embedding(user_inputs))\n item_vecs = self.flatten(self.item_embedding(item_inputs))\n\n input_vecs = self.concat([user_vecs, item_vecs])\n\n y = self.dropout1(input_vecs)\n y = self.dense1(y)\n y = self.dropout2(y)\n y = self.dense2(y)\n y = self.dense3(y)\n\n return y\n\nmodel = ClassificationModel(16, max_user_id, max_item_id)\nmodel.compile(optimizer='adam', loss='sparse_categorical_crossentropy')\n\ninitial_train_preds = model.predict([user_id_train, item_id_train]).argmax(axis=1) + 1\nprint(\"Random init MSE: %0.3f\" % mean_squared_error(initial_train_preds, rating_train))\nprint(\"Random init MAE: %0.3f\" % mean_absolute_error(initial_train_preds, rating_train))\n\nhistory = model.fit([user_id_train, item_id_train], rating_train - 1,\n batch_size=64, epochs=15, validation_split=0.1,\n shuffle=True)\n\nplt.plot(history.history['loss'], label='train')\nplt.plot(history.history['val_loss'], label='validation')\nplt.ylim(0, 2)\nplt.legend(loc='best')\nplt.title('loss');\n\ntest_preds = model.predict([user_id_test, item_id_test]).argmax(axis=1) + 1\nprint(\"Final test MSE: %0.3f\" % mean_squared_error(test_preds, rating_test))\nprint(\"Final test MAE: %0.3f\" % mean_absolute_error(test_preds, rating_test))\n",
"Random init MSE: 2.106\nRandom init MAE: 1.070\nTrain on 72000 samples, validate on 8000 samples\nEpoch 1/15\n72000/72000 [==============================] - 3s 47us/sample - loss: 1.3656 - val_loss: 1.2794\nEpoch 2/15\n72000/72000 [==============================] - 3s 38us/sample - loss: 1.2821 - val_loss: 1.2615\nEpoch 3/15\n72000/72000 [==============================] - 3s 44us/sample - loss: 1.2644 - val_loss: 1.2542\nEpoch 4/15\n72000/72000 [==============================] - 3s 37us/sample - loss: 1.2512 - val_loss: 1.2463\nEpoch 5/15\n72000/72000 [==============================] - 4s 51us/sample - loss: 1.2438 - val_loss: 1.2494\nEpoch 6/15\n72000/72000 [==============================] - 3s 40us/sample - loss: 1.2366 - val_loss: 1.2441\nEpoch 7/15\n72000/72000 [==============================] - 3s 38us/sample - loss: 1.2330 - val_loss: 1.2442\nEpoch 8/15\n72000/72000 [==============================] - 2s 28us/sample - loss: 1.2293 - val_loss: 1.2425\nEpoch 9/15\n72000/72000 [==============================] - 2s 28us/sample - loss: 1.2257 - val_loss: 1.2413\nEpoch 10/15\n72000/72000 [==============================] - 2s 28us/sample - loss: 1.2215 - val_loss: 1.2412\nEpoch 11/15\n72000/72000 [==============================] - 2s 28us/sample - loss: 1.2213 - val_loss: 1.2414\nEpoch 12/15\n72000/72000 [==============================] - 2s 31us/sample - loss: 1.2165 - val_loss: 1.2406\nEpoch 13/15\n72000/72000 [==============================] - 2s 30us/sample - loss: 1.2163 - val_loss: 1.2395\nEpoch 14/15\n72000/72000 [==============================] - 2s 33us/sample - loss: 1.2129 - val_loss: 1.2435\nEpoch 15/15\n72000/72000 [==============================] - 2s 34us/sample - loss: 1.2101 - val_loss: 1.2392\nFinal test MSE: 1.147\nFinal test MAE: 0.716\n"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a648a21bb034ae9eaf5e798c3f65b57c47bbd6f
| 31,466 |
ipynb
|
Jupyter Notebook
|
.ipynb_checkpoints/dmr_round3-checkpoint.ipynb
|
mukamel-lab/SingleCellFusion_EnhancerPaper
|
acbfa5184667ca57c333c04c310b0712a0e8e15e
|
[
"MIT"
] | null | null | null |
.ipynb_checkpoints/dmr_round3-checkpoint.ipynb
|
mukamel-lab/SingleCellFusion_EnhancerPaper
|
acbfa5184667ca57c333c04c310b0712a0e8e15e
|
[
"MIT"
] | null | null | null |
.ipynb_checkpoints/dmr_round3-checkpoint.ipynb
|
mukamel-lab/SingleCellFusion_EnhancerPaper
|
acbfa5184667ca57c333c04c310b0712a0e8e15e
|
[
"MIT"
] | 1 |
2021-11-15T19:03:03.000Z
|
2021-11-15T19:03:03.000Z
| 43.401379 | 227 | 0.484745 |
[
[
[
"import sys\nsys.path.insert(0, '/cndd/fangming/CEMBA/snmcseq_dev')\n\nfrom __init__ import *\nfrom snmcseq_utils import cd\nfrom snmcseq_utils import create_logger\n# from CEMBA_update_mysql import connect_sql",
"_____no_output_____"
],
[
"log = create_logger()\nlog.info(\"Hello\")",
"02/26/2020 09:00:32 PM Hello\n"
],
[
"# \ninput_f = '/cndd2/fangming/projects/scf_enhancers/data/bulk/round2/mc/dmr/cgdmr_round2_rms_results_collapsed.tsv'\ndf = pd.read_table(input_f, index_col=['#chr', 'start', 'end'], dtype={'#chr': object})\nprint(df.shape)\ndf.head()",
"/cndd/fangming/venvs/conda_dobro/lib/python3.6/site-packages/ipykernel_launcher.py:3: FutureWarning: read_table is deprecated, use read_csv instead, passing sep='\\t'.\n This is separate from the ipykernel package so we can avoid doing imports until\n/cndd/fangming/venvs/conda_dobro/lib/python3.6/site-packages/numpy/lib/arraysetops.py:463: FutureWarning: elementwise comparison failed; returning scalar instead, but in the future will perform elementwise comparison\n mask |= (ar1 == a)\n"
],
[
"print(df.shape)\n\ndf_hypo = df.loc[(df['number_of_dms']>=3) & (~df['hypomethylated_samples'].isnull()), 'hypomethylated_samples'].apply(lambda x: x.split(','))\ndf_hyper = df.loc[(df['number_of_dms']>=3) & (~df['hypermethylated_samples'].isnull()), 'hypermethylated_samples'].apply(lambda x: x.split(','))\n\nprint(df_hypo.shape)\nprint(df_hyper.shape)",
"(1460167, 32)\n(592542,)\n(519496,)\n"
],
[
"df_hypo.head()\n# df_hypo_cluster = df_hypo.loc[\n# df_hypo.apply(lambda x: ('merged_mCG_{}_{}_{}'.format(cluster_type, i+1, ens) in x))]",
"_____no_output_____"
],
[
"outdir = '/cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr'\nif not os.path.isdir(outdir):\n os.makedirs(outdir)\n logging.info('Created directory {}'.format(outdir))\n \nclsts = [i[len('methylation_level_'):] for i in df.filter(regex='^methylation_level_*').columns]\nfor clst in clsts:\n df_hypo_cluster = df_hypo[df_hypo.apply(lambda x: (clst in x))]\n output = os.path.join(outdir, 'dmr_{}.bed'.format(clst))\n df_hypo_cluster.to_csv(output, sep='\\t', header=False, index=True, na_rep='NA')\n logging.info(\"Saved to {}\".format(output))\n logging.info(df_hypo_cluster.shape)",
"01/23/2020 02:07:37 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_1-1.bed\n01/23/2020 02:07:37 PM (207413,)\n01/23/2020 02:07:40 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_1-2.bed\n01/23/2020 02:07:40 PM (258769,)\n01/23/2020 02:07:41 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_10-1.bed\n01/23/2020 02:07:41 PM (18082,)\n01/23/2020 02:07:44 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_2-1.bed\n01/23/2020 02:07:44 PM (234123,)\n01/23/2020 02:07:46 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_2-2.bed\n01/23/2020 02:07:46 PM (141155,)\n01/23/2020 02:07:48 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_2-3.bed\n01/23/2020 02:07:48 PM (153768,)\n01/23/2020 02:07:51 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_3-1.bed\n01/23/2020 02:07:51 PM (196704,)\n01/23/2020 02:07:53 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_3-2.bed\n01/23/2020 02:07:53 PM (172013,)\n01/23/2020 02:07:54 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_3-3.bed\n01/23/2020 02:07:54 PM (40258,)\n01/23/2020 02:07:57 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_4-1.bed\n01/23/2020 02:07:57 PM (246202,)\n01/23/2020 02:08:01 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_4-2.bed\n01/23/2020 02:08:01 PM (328382,)\n01/23/2020 02:08:03 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_5-1.bed\n01/23/2020 02:08:03 PM (137368,)\n01/23/2020 02:08:04 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_5-2.bed\n01/23/2020 02:08:04 PM (76738,)\n01/23/2020 02:08:05 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_5-3.bed\n01/23/2020 02:08:05 PM (78406,)\n01/23/2020 02:08:06 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_5-4.bed\n01/23/2020 02:08:06 PM (17966,)\n01/23/2020 02:08:07 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_5-5.bed\n01/23/2020 02:08:07 PM (13895,)\n01/23/2020 02:08:08 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_6-1.bed\n01/23/2020 02:08:08 PM (98997,)\n01/23/2020 02:08:11 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_6-2.bed\n01/23/2020 02:08:11 PM (218602,)\n01/23/2020 02:08:13 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_6-3.bed\n01/23/2020 02:08:13 PM (84450,)\n01/23/2020 02:08:15 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_6-4.bed\n01/23/2020 02:08:15 PM (122690,)\n01/23/2020 02:08:16 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_6-5.bed\n01/23/2020 02:08:16 PM (70594,)\n01/23/2020 02:08:17 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_6-6.bed\n01/23/2020 02:08:17 PM (20212,)\n01/23/2020 02:08:18 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_7-1.bed\n01/23/2020 02:08:18 PM (43227,)\n01/23/2020 02:08:19 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_7-2.bed\n01/23/2020 02:08:19 PM (32824,)\n01/23/2020 02:08:20 PM Saved to 
/cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_8-1.bed\n01/23/2020 02:08:20 PM (32424,)\n01/23/2020 02:08:21 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_8-2.bed\n01/23/2020 02:08:21 PM (27033,)\n01/23/2020 02:08:22 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_8-3.bed\n01/23/2020 02:08:22 PM (4271,)\n01/23/2020 02:08:23 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_9-1.bed\n01/23/2020 02:08:23 PM (69381,)\n01/23/2020 02:08:24 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_191211/dmr/dmr_9-2.bed\n01/23/2020 02:08:24 PM (64315,)\n"
],
[
"outdir = '/cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr'\nif not os.path.isdir(outdir):\n os.makedirs(outdir)\n logging.info('Created directory {}'.format(outdir))\n \nclsts = [i[len('methylation_level_'):] for i in df.filter(regex='^methylation_level_*').columns]\nfor clst in clsts:\n df_hyper_cluster = df_hyper[df_hyper.apply(lambda x: (clst in x))]\n output = os.path.join(outdir, 'dmr_{}.bed'.format(clst))\n df_hyper_cluster.to_csv(output, sep='\\t', header=False, index=True, na_rep='NA')\n logging.info(\"Saved to {}\".format(output))\n logging.info(df_hyper_cluster.shape)",
"02/26/2020 09:03:09 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_1-1.bed\n02/26/2020 09:03:09 PM (61715,)\n02/26/2020 09:03:10 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_1-2.bed\n02/26/2020 09:03:10 PM (98223,)\n02/26/2020 09:03:11 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_10-1.bed\n02/26/2020 09:03:11 PM (20988,)\n02/26/2020 09:03:13 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_2-1.bed\n02/26/2020 09:03:13 PM (175655,)\n02/26/2020 09:03:15 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_2-2.bed\n02/26/2020 09:03:15 PM (148417,)\n02/26/2020 09:03:16 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_2-3.bed\n02/26/2020 09:03:16 PM (74596,)\n02/26/2020 09:03:17 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_3-1.bed\n02/26/2020 09:03:17 PM (81410,)\n02/26/2020 09:03:18 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_3-2.bed\n02/26/2020 09:03:18 PM (46785,)\n02/26/2020 09:03:19 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_3-3.bed\n02/26/2020 09:03:19 PM (12001,)\n02/26/2020 09:03:20 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_4-1.bed\n02/26/2020 09:03:20 PM (46383,)\n02/26/2020 09:03:22 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_4-2.bed\n02/26/2020 09:03:22 PM (122035,)\n02/26/2020 09:03:26 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_5-1.bed\n02/26/2020 09:03:26 PM (326809,)\n02/26/2020 09:03:29 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_5-2.bed\n02/26/2020 09:03:29 PM (228209,)\n02/26/2020 09:03:32 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_5-3.bed\n02/26/2020 09:03:32 PM (268100,)\n02/26/2020 09:03:33 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_5-4.bed\n02/26/2020 09:03:33 PM (12494,)\n02/26/2020 09:03:34 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_5-5.bed\n02/26/2020 09:03:34 PM (6855,)\n02/26/2020 09:03:36 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_6-1.bed\n02/26/2020 09:03:36 PM (161607,)\n02/26/2020 09:03:38 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_6-2.bed\n02/26/2020 09:03:38 PM (176213,)\n02/26/2020 09:03:40 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_6-3.bed\n02/26/2020 09:03:40 PM (142922,)\n02/26/2020 09:03:42 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_6-4.bed\n02/26/2020 09:03:42 PM (113467,)\n02/26/2020 09:03:44 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_6-5.bed\n02/26/2020 09:03:44 PM (124268,)\n02/26/2020 09:03:45 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_6-6.bed\n02/26/2020 09:03:45 PM (3461,)\n02/26/2020 09:03:47 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_7-1.bed\n02/26/2020 09:03:47 PM (225936,)\n02/26/2020 09:03:49 PM Saved to 
/cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_7-2.bed\n02/26/2020 09:03:49 PM (130635,)\n02/26/2020 09:03:50 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_8-1.bed\n02/26/2020 09:03:50 PM (31154,)\n02/26/2020 09:03:51 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_8-2.bed\n02/26/2020 09:03:51 PM (68827,)\n02/26/2020 09:03:52 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_8-3.bed\n02/26/2020 09:03:52 PM (2460,)\n02/26/2020 09:03:53 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_9-1.bed\n02/26/2020 09:03:53 PM (7061,)\n02/26/2020 09:03:54 PM Saved to /cndd2/fangming/projects/scf_enhancers/enhancer_round2_hyper_200226/dmr/dmr_9-2.bed\n02/26/2020 09:03:54 PM (9329,)\n"
]
],
[
[
"# Not organized ",
"_____no_output_____"
]
],
[
[
"with cd(os.path.join(PATH_ENSEMBLES, ens, 'dmr')):\n f = 'dmr_allc_merged_mCG_cluster_mCHmCG_lv_npc50_k30_rms_results_collapsed.tsv.DMR.bed'\n df_bed = pd.read_table(f, header=None, names=['chr', 'start', 'end', 'num_dms'])\nprint(df_bed.shape)\ndf_bed.head()",
"(1162175, 4)\n"
],
[
"df_out = df_bed[df_bed['num_dms']>=20]\nwith cd(os.path.join(PATH_ENSEMBLES, ens, 'dmr')):\n df_out.to_csv('dmr_allc_merged_mCG_cluster_mCHmCG_lv_npc50_k30_rms_results_collapsed_20dms.tsv.DMR.bed', \n sep='\\t', na_rep='NA', header=False, index=False)",
"_____no_output_____"
]
]
] |
[
"code",
"markdown",
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
]
] |
4a64a2d8959fd9041d634373eb14db2c836b5d4a
| 528,210 |
ipynb
|
Jupyter Notebook
|
Random Forest/COVID_19_Forcast.ipynb
|
DKMalungu/COVID-19
|
5876242290c45c5bd7887ba74055600f9665f1bf
|
[
"MIT"
] | null | null | null |
Random Forest/COVID_19_Forcast.ipynb
|
DKMalungu/COVID-19
|
5876242290c45c5bd7887ba74055600f9665f1bf
|
[
"MIT"
] | null | null | null |
Random Forest/COVID_19_Forcast.ipynb
|
DKMalungu/COVID-19
|
5876242290c45c5bd7887ba74055600f9665f1bf
|
[
"MIT"
] | null | null | null | 58.781438 | 13,205 | 0.500025 |
[
[
[
"# Loading Libraries",
"_____no_output_____"
]
],
[
[
"# Importing the core libraies\nimport numpy as np \nimport pandas as pd \nfrom IPython.display import Markdown\nfrom datetime import timedelta\nfrom datetime import datetime\n\nimport plotly.express as px\nimport plotly.graph_objs as go\n!pip install pycountry\nimport pycountry\nfrom plotly.offline import init_notebook_mode, iplot \nimport plotly.offline as py\nimport plotly.express as ex\nfrom plotly.offline import download_plotlyjs,init_notebook_mode,plot,iplot\nimport matplotlib.pyplot as plt\npy.init_notebook_mode(connected=True)\nplt.style.use(\"seaborn-talk\")\nplt.rcParams['figure.figsize'] = 8, 5\nplt.rcParams['image.cmap'] = 'viridis'\nimport folium\n#!pip install ipympl\n%matplotlib notebook\n\nfrom fbprophet import Prophet\nfrom fbprophet.plot import plot_plotly\n\npd.set_option('display.max_rows', None)\nfrom math import sin, cos, sqrt, atan2, radians\nfrom warnings import filterwarnings\nfilterwarnings('ignore')\nfrom sklearn import preprocessing\nfrom xgboost import XGBRegressor\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")",
"Requirement already satisfied: pycountry in /usr/local/lib/python3.6/dist-packages (19.8.18)\n"
],
[
"# from google.colab import drive\n# drive.mount('/content/drive')",
"Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount(\"/content/drive\", force_remount=True).\n"
],
[
"# Importing the dataset into python\n#df = pd.read_csv('./covid19-global-forecasting-week-5/covid_19_data.csv',parse_dates=['ObservationDate'])\ndf = pd.read_csv('/content/drive/My Drive/Master Online Exams/Dataset/covid_19_data.csv',parse_dates=['ObservationDate'])\ndf.drop(['SNo','Last Update'],axis =1, inplace = True)\ndf['Active'] = df['Confirmed'] - (df['Recovered'] + df['Deaths'])",
"_____no_output_____"
],
[
"#week5_train = pd.read_csv('./covid19-global-forecasting-week/train.csv')\n#week5_test = pd.read_csv('./covid19-global-forecasting-week-5/test.csv')\n#dataset=pd.read_csv('./covid19-global-forecasting-week-5/covid_19_complete.csv',parse_dates=['Date'])\nweek5_train = pd.read_csv('/content/drive/My Drive/Master Online Exams/Dataset/train.csv')\nweek5_test = pd.read_csv('/content/drive/My Drive/Master Online Exams/Dataset/test.csv')\ndataset=pd.read_csv('/content/drive/My Drive/Master Online Exams/Dataset/covid_19_complete.csv',parse_dates=['Date'])",
"_____no_output_____"
]
],
[
[
"# Data Cleaning",
"_____no_output_____"
]
],
[
[
"# Dataset Cleaning\n# Creating a colunm for active cases\ndataset['Confirmed'] = dataset['Confirmed'].astype(int)\ndataset['Active'] = dataset['Confirmed'] - dataset['Deaths'] - dataset['Recovered']\n# Replacing the word Mainland China with china\ndataset['Country/Region'] = dataset['Country/Region'].replace('Mainland China','China')\n#Filling Missing values\ndataset[['Province/State']] = dataset[['Province/State']].fillna('')\ndataset[['Confirmed', 'Deaths', 'Recovered', 'Active']] = dataset[['Confirmed', 'Deaths', 'Recovered', 'Active']].fillna(0)\n# Datatypes\ndataset['Recovered'] = dataset['Recovered'].astype(int)",
"_____no_output_____"
],
[
"# Creating Grouped dataset \ndataset_grouped = dataset.groupby(['Date','Country/Region'])['Confirmed','Deaths','Recovered','Active'].sum().reset_index()\n\n# New cases \ntemp = dataset_grouped.groupby(['Country/Region', 'Date', ])['Confirmed', 'Deaths', 'Recovered']\ntemp = temp.sum().diff().reset_index()\nmask = temp['Country/Region'] != temp['Country/Region'].shift(1)\ntemp.loc[mask, 'Confirmed'] = np.nan\ntemp.loc[mask, 'Deaths'] = np.nan\ntemp.loc[mask, 'Recovered'] = np.nan\n\n#Renaming columns\ntemp.columns = ['Country/Region', 'Date', 'New cases', 'New deaths', 'New recovered']\n# Merging new values\n# Dataset_grouped = pd.merge(dataset_grouped, temp, on=['Country/Region', 'Date'])\ndataset_grouped = pd.merge(dataset_grouped, temp, on=['Country/Region', 'Date'])\n# Filling na with 0\ndataset_grouped = dataset_grouped.fillna(0)\n# Fixing data types\ncols = ['New cases', 'New deaths', 'New recovered']\ndataset_grouped[cols] = dataset_grouped[cols].astype('int')\ndataset_grouped['New cases'] = dataset_grouped['New cases'].apply(lambda x: 0 if x<0 else x)",
"_____no_output_____"
],
[
"# Country data grouping\ncountry_wise = dataset_grouped[dataset_grouped['Date']==max(dataset_grouped['Date'])].reset_index(drop=True).drop('Date',axis=1)\n\n#Grouped by Country\ncountry_wise = country_wise.groupby('Country/Region')['Confirmed','Deaths','Recovered','Active','New cases'].sum().reset_index()\n\n# Grouped per 100 cases\ncountry_wise['Deaths / 100 Cases'] = round((country_wise['Deaths']/country_wise['Confirmed'])*100,2)\ncountry_wise['Recovered / 100 cases'] = round((country_wise['Recovered']/country_wise['Confirmed'])*100,2)\ncountry_wise['Deaths / 100 Recovered'] = round((country_wise['Deaths']/country_wise['Recovered'])*100,2)\ncols= ['Deaths / 100 Cases','Recovered / 100 cases','Deaths / 100 Recovered']\ncountry_wise[cols] = country_wise[cols].fillna(0)\n\n# Grouping by Time Values\ntoday = dataset_grouped[dataset_grouped['Date']==max(dataset_grouped['Date'])].reset_index(drop=True).drop('Date', axis=1)[['Country/Region', 'Confirmed']]\nlast_week = dataset_grouped[dataset_grouped['Date']==max(dataset_grouped['Date'])-timedelta(days=7)].reset_index(drop=True).drop('Date', axis=1)[['Country/Region', 'Confirmed']]\ntemp = pd.merge(today, last_week, on='Country/Region', suffixes=(' today', ' last week'))\n#temp = temp[['Country/Region', 'Confirmed last week']]\ntemp['1 week change'] = temp['Confirmed today'] - temp['Confirmed last week']\ntemp = temp[['Country/Region', 'Confirmed last week', '1 week change']]\ncountry_wise = pd.merge(country_wise, temp, on='Country/Region')\ncountry_wise['1 week % increase'] = round(country_wise['1 week change']/country_wise['Confirmed last week']*100, 2)",
"_____no_output_____"
],
[
"day_wise = dataset_grouped.groupby('Date')['Confirmed', 'Deaths', 'Recovered', 'Active', 'New cases'].sum().reset_index()\n# number cases per 100 cases\nday_wise['Deaths / 100 Cases'] = round((day_wise['Deaths']/day_wise['Confirmed'])*100, 2)\nday_wise['Recovered / 100 Cases'] = round((day_wise['Recovered']/day_wise['Confirmed'])*100, 2)\nday_wise['Deaths / 100 Recovered'] = round((day_wise['Deaths']/day_wise['Recovered'])*100, 2)\n# no. of countries\nday_wise['No. of countries'] = dataset_grouped[dataset_grouped['Confirmed']!=0].groupby('Date')['Country/Region'].unique().apply(len).values\n# fillna by 0\ncols = ['Deaths / 100 Cases', 'Recovered / 100 Cases', 'Deaths / 100 Recovered']\nday_wise[cols] = day_wise[cols].fillna(0)",
"_____no_output_____"
],
[
"week5_train = week5_train.drop(columns = ['County' , 'Province_State'])\nweek5_test = week5_test.drop(columns = ['County' , 'Province_State'])\nweek5_train['Date']= pd.to_datetime(week5_train['Date']).dt.strftime(\"%Y%m%d\").astype(int)\nweek5_test['Date'] = pd.to_datetime(week5_test['Date']).dt.strftime(\"%Y%m%d\").astype(int)",
"_____no_output_____"
],
[
"date_wise_data = df[['Country/Region',\"ObservationDate\",\"Confirmed\",\"Deaths\",\"Recovered\",'Active']]\ndate_wise_data['Date'] = date_wise_data['ObservationDate'].apply(pd.to_datetime, dayfirst=True)\ndate_wise_data = date_wise_data.groupby([\"ObservationDate\"]).sum().reset_index()\ndate_wise_data.rename({\"ObservationDate\": 'Date','Recovered':'Cured'}, axis=1,inplace= True) \ndef formatted_text(string):\n display(Markdown(string))\n",
"_____no_output_____"
],
[
"# Converting columns into numberic for Train \nfrom sklearn.preprocessing import LabelEncoder\nlabelencoder = LabelEncoder()\nw5_X = week5_train.iloc[:,1].values\nweek5_train.iloc[:,1] = labelencoder.fit_transform(w5_X.astype(str))\nw5_X = week5_train.iloc[:,5].values\nweek5_train.iloc[:,5] = labelencoder.fit_transform(w5_X)\n\n#Converting columns into numberic Test\nfrom sklearn.preprocessing import LabelEncoder\nlabelencoder = LabelEncoder()\nw5te_X = week5_test.iloc[:,1].values\nweek5_test.iloc[:,1] = labelencoder.fit_transform(w5te_X)\nw5te_X = week5_test.iloc[:,5].values\nweek5_test.iloc[:,5] = labelencoder.fit_transform(w5te_X)\n\n#Train & Test ======================================================\nx = week5_train.iloc[:,1:6]\ny = week5_train.iloc[:,6]\nfrom sklearn.model_selection import train_test_split\nx_train,x_test,y_train,y_test =train_test_split(x,y, test_size = 0.2, random_state = 0 )",
"_____no_output_____"
],
[
"#Adding Population Data\npop = pd.read_csv(\"/content/drive/My Drive/Master Online Exams/Dataset/population_by_country_2020.csv\")\n# select only population\npop = pop.iloc[:, :2]\n# rename column names\npop.columns = ['Country/Region', 'Population']\n# merged data\ncountry_wise = pd.merge(country_wise, pop, on='Country/Region', how='left')\n# update population\ncols = ['Burma', 'Congo (Brazzaville)', 'Congo (Kinshasa)', \"Cote d'Ivoire\", 'Czechia', \n 'Kosovo', 'Saint Kitts and Nevis', 'Saint Vincent and the Grenadines', \n 'Taiwan*', 'US', 'West Bank and Gaza']\npops = [54409800, 89561403, 5518087, 26378274, 10708981, 1793000, \n 53109, 110854, 23806638, 330541757, 4543126]\nfor c, p in zip(cols, pops):\n country_wise.loc[country_wise['Country/Region']== c, 'Population'] = p\ncountry_wise['Cases / Million People'] = round((country_wise['Confirmed'] / country_wise['Population']) * 1000000)",
"_____no_output_____"
],
[
"temp = country_wise.copy()\ntemp = temp.iloc[:,:6]\ntemp = temp.sort_values('Confirmed',ascending=False).reset_index()\ntemp.style.background_gradient(cmap='Blues',subset=[\"Confirmed\"])\\\n .background_gradient(cmap='Reds',subset=[\"Deaths\"])\\\n .background_gradient(cmap='Greens',subset=[\"Recovered\"])\\\n .background_gradient(cmap='Purples',subset=[\"Active\"])\\\n .background_gradient(cmap='PuBu',subset=[\"New cases\"])",
"_____no_output_____"
],
[
"",
"_____no_output_____"
],
[
"temp = dataset.groupby('Date')['Confirmed', 'Deaths', 'Recovered', 'Active'].sum().reset_index()\ntemp = temp[temp['Date']==max(temp['Date'])].reset_index(drop=True)\ntemp1 = temp.melt(id_vars=\"Date\", value_vars=['Active', 'Deaths', 'Recovered'])\nfig = px.pie(temp1,\n values= 'value',labels=['Active Cases','Cured','Death'],\n names=\"variable\",\n title=\"Current Situation of COVID-19 in the world\",\n template=\"seaborn\")\nfig.update_traces(hoverinfo='label+percent',textinfo='value', textfont_size=14,\n marker=dict(colors=['#263fa3','#cc3c2f','#2fcc41'], line=dict(color='#FFFFFF', width=2)))\nfig.update_traces(textposition='inside')\nfig.update_traces(rotation=90, pull=0.05, textinfo=\"percent+label\")\nfig.show()\n",
"_____no_output_____"
],
[
"perday2 = date_wise_data.groupby(['Date'])['Confirmed','Cured','Deaths','Active'].sum().reset_index().sort_values('Date',ascending = True)\nperday2['New Daily Confirmed Cases'] = perday2['Confirmed'].sub(perday2['Confirmed'].shift())\nperday2['New Daily Confirmed Cases'].iloc[0] = perday2['Confirmed'].iloc[0]\nperday2['New Daily Confirmed Cases'] = perday2['New Daily Confirmed Cases'].astype(int)\nperday2['New Daily Cured Cases'] = perday2['Cured'].sub(perday2['Cured'].shift())\nperday2['New Daily Cured Cases'].iloc[0] = perday2['Cured'].iloc[0]\nperday2['New Daily Cured Cases'] = perday2['New Daily Cured Cases'].astype(int)\nperday2['New Daily Deaths Cases'] = perday2['Deaths'].sub(perday2['Deaths'].shift())\nperday2['New Daily Deaths Cases'].iloc[0] = perday2['Deaths'].iloc[0]\nperday2['New Daily Deaths Cases'] = perday2['New Daily Deaths Cases'].astype(int)\nperday2.to_csv('/content/drive/My Drive/Master Online Exams/Dataset/perday_daily_cases.csv')",
"_____no_output_____"
],
[
"import plotly.express as px\nfig = px.bar(perday2, x=\"Date\", y=\"New Daily Confirmed Cases\", barmode='group',height=500)\nfig.update_layout(title_text='New COVID-19 cases reported daily all over the World',plot_bgcolor='rgb(275, 270, 273)')\nfig.show()",
"_____no_output_____"
],
[
"import plotly.express as px\nfig = px.bar(perday2, x=\"Date\", y=\"New Daily Cured Cases\", barmode='group',height=500,\n color_discrete_sequence = ['#319146'])\nfig.update_layout(title_text='New COVID-19 Recovered cases reported daily all over the world',plot_bgcolor='rgb(275, 270, 273)')\nfig.show()",
"_____no_output_____"
],
[
"",
"_____no_output_____"
],
[
"",
"_____no_output_____"
],
[
"# Exploratory Data Analysis",
"_____no_output_____"
],
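[
"# The EDA cells below refer to `train` and `test` dataframes. A minimal sketch\n# to make them runnable: reload the raw week-5 files (same Drive paths as used\n# earlier in this notebook) without the label encoding that was applied to\n# week5_train/week5_test, so that country names and dates stay readable.\ntrain = pd.read_csv('/content/drive/My Drive/Master Online Exams/Dataset/train.csv')\ntest = pd.read_csv('/content/drive/My Drive/Master Online Exams/Dataset/test.csv')",
"_____no_output_____"
],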
[
"# 1. Displaying a sample of the train and test dataset\nprint('Training Dataset: \\n\\n', train.sample(5))\nprint('\\n\\n\\n Test Dataset: \\n\\n', test.sample(5))",
"_____no_output_____"
],
[
"# Visualising the Descriptive analysis of the dataset\n# 2 Visializing the data types for each colunm\n\nprint('Data Types for each columns (Training) \\n\\n',train.dtypes)\nprint('\\n\\n\\n Data Types for each columns (Testing) \\n\\n',train.dtypes)",
"_____no_output_____"
],
[
"# 2 Visualizing the Static Information for Numerical Columns\n\nprint('Descriptiva analytics for the dataset (Training): \\n\\n',train.describe())\nprint('\\n\\n\\n Descriptive analytics for dataset (test): \\n\\n' ,test.describe())",
"_____no_output_____"
],
[
"# Displaying some information regarding the dataset\nprint('Dataset information (Training): \\n\\n',train.info())\nprint('\\n\\n\\n Dataset information (Testing): \\n\\n', test.info())",
"_____no_output_____"
],
[
"# Displaying the shape of the dataset\nprint('Traing dataset shape: ', train.shape)\nprint('\\n Testing Dataset shape: ' ,test.shape)",
"_____no_output_____"
],
[
"# Displaying graph of the top 15 Countries\nl = train[['Country_Region','TargetValue']].groupby(['Country_Region'], as_index = False).sum().sort_values(by = 'TargetValue',ascending=False)\nw = pd.DataFrame(l)\ndata1 = l.head(15)\nfig = px.bar(data1,x = 'Country_Region',y = 'TargetValue')\nfig.show()",
"_____no_output_____"
],
[
"# pi char Vusualizing confirmed casea and faitalities\nfig = px.pie(train, values='TargetValue', names='Target')\nfig.update_traces(textposition='inside')\nfig.update_layout(uniformtext_minsize=12, uniformtext_mode='hide')\nfig.show()",
"_____no_output_____"
],
[
"# Displaing the curent world share of covid-19 cases\n\nfig = px.pie(train, values='TargetValue', names='Country_Region')\nfig.update_traces(textposition='inside')\nfig.update_layout(uniformtext_minsize=12, uniformtext_mode='hide')\nfig.show()",
"_____no_output_____"
],
[
"# Tree Map of Covid Cases Globaly\nfig = px.treemap(train, path=['Country_Region'], values='TargetValue',\n color='Population', hover_data=['Country_Region'],\n color_continuous_scale='matter', title='Current share of Worldwide COVID19 Confirmed Cases')\nfig.show()",
"_____no_output_____"
],
[
"# COVID 19 Total cases growth for top 15 countries\nlast_date = train.Date.max()\ndf_countries = train[train['Date']==last_date]\ndf_countries = df_countries.groupby('Country_Region', as_index=False)['TargetValue'].sum()\ndf_countries = df_countries.nlargest(15,'TargetValue')\ndf_trend = train.groupby(['Date','Country_Region'], as_index=False)['TargetValue'].sum()\ndf_trend = df_trend.merge(df_countries, on='Country_Region')\ndf_trend.rename(columns={'Country_Region':'Country', 'TargetValue_x':'Cases'}, inplace=True)\npx.line(df_trend, x='Date', y='Cases', color='Country', title='COVID19 Total Cases growth for top 10 worst affected countries')\n",
"_____no_output_____"
],
[
"",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a64c1225acc1a1f7f80e255c64ab3ddc4f2d231
| 153,916 |
ipynb
|
Jupyter Notebook
|
CSV to HTML.ipynb
|
satoran1/Web-Design-Challenge
|
1b3eeab09ef9f561bc4988d1f5eda8253e7d0231
|
[
"ADSL"
] | null | null | null |
CSV to HTML.ipynb
|
satoran1/Web-Design-Challenge
|
1b3eeab09ef9f561bc4988d1f5eda8253e7d0231
|
[
"ADSL"
] | null | null | null |
CSV to HTML.ipynb
|
satoran1/Web-Design-Challenge
|
1b3eeab09ef9f561bc4988d1f5eda8253e7d0231
|
[
"ADSL"
] | null | null | null | 773.447236 | 148,586 | 0.389914 |
[
[
[
"import pandas as pd",
"_____no_output_____"
],
[
"df = pd.read_csv(\"Resources/cities.csv\")\ndf.head()",
"_____no_output_____"
],
[
"html_table = df.to_html()\nhtml_table",
"_____no_output_____"
],
[
"df.to_html('table.html')",
"_____no_output_____"
]
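,
[
"# Optional (a sketch of common to_html options, not required by the exercise):\n# drop the index column and attach CSS classes so the exported table is easier\n# to style in the final web page.\ndf.to_html('table.html', index=False, classes='table table-striped', border=0)",
"_____no_output_____"
]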
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code"
]
] |
4a64c7b81344317e0d9e7e963415dfca339481dd
| 467,343 |
ipynb
|
Jupyter Notebook
|
notebooks/case_study/3_Case_Study_Analysis.ipynb
|
IOMRC/intake-aodn
|
cd7d4df1fafc1afd36182a5d535fe62ab3fd2643
|
[
"BSD-3-Clause"
] | 2 |
2021-12-10T04:24:02.000Z
|
2022-02-04T14:04:29.000Z
|
notebooks/case_study/3_Case_Study_Analysis.ipynb
|
IOMRC/intake-aodn
|
cd7d4df1fafc1afd36182a5d535fe62ab3fd2643
|
[
"BSD-3-Clause"
] | null | null | null |
notebooks/case_study/3_Case_Study_Analysis.ipynb
|
IOMRC/intake-aodn
|
cd7d4df1fafc1afd36182a5d535fe62ab3fd2643
|
[
"BSD-3-Clause"
] | null | null | null | 1,079.314088 | 160,660 | 0.955932 |
[
[
[
"Author: Maxime Marin \n@: [email protected]\n\n# Accessing IMOS data case studies: Walk-through and interactive session - Analysis\n\nIn this notebook, we will provide a receipe for further analysis to be done on the same dataset we selected earlier. In the future, a similar notebook can be tailored to a particular dataset, performing analysis that is easily repeatable. \nAlternatively, curious users can use the code to \"tweak it\" to their needs, and perform slightly different analysis and visualisation. This is why we have deliberately left some code in the cells, rather than hiding it.\n\nAs always, we start by importing our data and some main libraries\n",
"_____no_output_____"
]
],
[
[
"import sys\nimport os\nsys.path.append('/home/jovyan/intake-aodn')\nimport intake_aodn\nimport matplotlib.pyplot as plt\n\nfrom intake_aodn.plot import Clim_plot\nfrom intake_aodn.analysis import lin_trend, make_clim",
"_____no_output_____"
],
[
"import xarray as xr\ndata = xr.open_dataset('Example_Data.nc')",
"_____no_output_____"
]
],
[
[
"***\n\n## 1) Climatology\n\nCalculating climatology of a particular variable is a very common operation performed in climate science. It allows to quantify the \"mean\" state of a particular variable and later substract it to the given variable to obtain anomalies. \n\nThe most common climatology is done on a yearly timescale. It is equivalent to yearly average and is useful to calculate linear trends:",
"_____no_output_____"
]
],
[
[
"# We will work with the box-average timeseries:\ndata_bavg = data.stack(space=['longitude','latitude']).mean(dim='space')\n\n# Perform and plot annual climatology\nclim,ax = Clim_plot(da = data_bavg['sea_surface_temperature'],time_res = 'year')\nylab = ax.get_ylabel() # stores the y-axis label to re-use later.\n\n# Calculate the linear trend and confidence interval\ncoef,fit,hci,lci = lin_trend(clim,'year')\n\n# Plot the linear model\nfit['linear_fit'].plot(ax=ax,color='red',label = 'trend')\nplt.fill_between(lci['year'].values,lci['linear_fit'].values,hci['linear_fit'].values,alpha=0.2,color='red')\n\n# add label, title and legend\nax.set_ylabel(ylab)\nax.set_title('Annual Mean')\nplt.legend();",
"_____no_output_____"
]
],
[
[
"We have plotted the annual averages of box-averaged SST, along with the correspinding linear trend. However, something seems to be off.. \n\nThe first and last yearly values appear to be underestimated and overestimated, respectively. Why is that? (Hint: try to execute `clim.year[0]`, and `data.time[0]`. Hint 2: the access the last index of a list, we use the `[-1]`.) ",
"_____no_output_____"
]
],
[
[
"# type code here",
"_____no_output_____"
]
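,
[
"# A possible check (sketch): the record does not cover the first and last\n# calendar years completely, so their \"annual\" means are computed from only\n# part of the year. Comparing the time span of the data with the climatology\n# years makes this visible.\nprint('data starts at:', data.time[0].values)\nprint('data ends at  :', data.time[-1].values)\nprint('first/last climatology years:', int(clim.year[0]), int(clim.year[-1]))",
"_____no_output_____"
]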
],
[
[
"The cell below outputs the same plot as before, but the 7th line is different. The function `Clim_plot()` takes `time_main` as an argument, which defines what time period we are interested in. Can we change line 7 to get a better plot?...",
"_____no_output_____"
]
],
[
[
"# We will work with the box-average timeseries:\ndata_bavg = data.stack(space=['longitude','latitude']).mean(dim='space')\n\n# Perform and plot annual climatology\nclim,ax = Clim_plot(da = data_bavg['sea_surface_temperature'],time_res = 'year',time_main = ['1992-01-01','2021-12-31'])\nylab = ax.get_ylabel() # stores the y-axis label to re-use later.\n\n# Calculate the linear trend and confidence interval\ncoef,fit,hci,lci = lin_trend(clim,'year')\n\n# Plot the linear model\nfit['linear_fit'].plot(ax=ax,color='red',label = 'trend')\nplt.fill_between(lci['year'].values,lci['linear_fit'].values,hci['linear_fit'].values,alpha=0.2,color='red')\n\n# add label, title and legend\nax.set_ylabel(ylab)\nax.set_title('Annual Mean')\nplt.legend();",
"_____no_output_____"
]
],
[
[
"***\n\n## 2) Monthly climatology\n\nUsing the same function as before `Clim_plot` we can also calculate monthly climatology, which gives us the mean state of the variable for all months of the year. \nTo do so, we just have to change the `time_res` argument such as `time_res = 'month'`",
"_____no_output_____"
]
],
[
[
"clim,ax = Clim_plot(data_bavg['sea_surface_temperature'],time_res = 'month',time_main = ['1992-01-01','2021-12-31'],time_recent = ['2011-01-01', None],ind_yr = [2011,2018 ,2019])\nax.set_title('Monthly Climatology');",
"_____no_output_____"
]
],
[
[
"We notice that the function takes several other arguments than before, including `time_recent` and `ind_yr`. \n`time_recent` tells the function to also plot monthly climatology for a \"more recent\" time period, reflecting the recent mean state. \n` ind_yr` let the user chose individual years to plot. This is useful to compare one particular year with the average. For example, we clearly see that the 2011 summer was way warmer than usual!\n\nNote: You can add these arguments if `time_res = 'year'`, but it will not do anything as it has no purpose for annual climatology.\n\n*** \n\n## 3) Linear trends\n\nIt can also be useful to visualise the spatial distribution of long-term changes or trends. Rather than plotting a linear trend from one timeseries, let's calculate linear trend coefficients at all pixels and map it.",
"_____no_output_____"
]
],
[
[
"from intake_aodn.plot import map_var, create_cb\nfrom intake_aodn.analysis import make_clim\nimport cmocean\nimport numpy as np\n\n#First, we calculate yearly averages\nclim = make_clim(data['sea_surface_temperature'],time_res = 'year')\n\n#Then we can compute our linear models\ncoef,fit,hci,lci= lin_trend(clim[0],'year',deg=1)\n#We rename a variable so that our plot makes more sense\ncoef = coef.rename({'polyfit_coefficients':'SST trend [C/decade]'})\n\n#let's plot\nfig = plt.figure(figsize=(30,8))\nax,gl,axproj = map_var((coef.isel(degree=0)['SST trend [C/decade]']*10),[coef.longitude.min(),coef.longitude.max()],[coef.latitude.min(),coef.latitude.max()],\n title = 'Linear Trends',\n cmap = cmocean.cm.balance,\n add_colorbar = False,\n vmin = -0.4,vmax = 0.4,\n levels=np.arange(-0.4,0.45,0.05));\ncb = create_cb(fig,ax,axproj,'SST trend [C/decade]',size = \"4%\", pad = 0.5,labelpad = 20,fontsize=20)",
"_____no_output_____"
]
],
[
[
"***\n\n## 4) Anomalies\n\nFinally, let's use climatology to compute anomalies. This is particularly usefull to understand inter-annual variability of the system (ENSO influence), which requires removing the seasonal cycle.",
"_____no_output_____"
]
],
[
[
"from intake_aodn.analysis import time_average, make_clim\n\n# Make monthly anomalies\nmn_ano = time_average(data,'M',var='sea_surface_temperature',ignore_inc = True).groupby('time.month') - make_clim(data['sea_surface_temperature'],time_res = 'month')[0]\n\n# Compute box-averge and plot\nbavg = mn_ano.stack(space=['longitude','latitude']).mean(dim='space')\nfig = plt.figure(figsize=(12,8))\nbavg.plot(label='monthly',color='black')\n\n# Make yearly running anomalies on monthly timescale\nyr_rol = bavg.rolling(time = 12, center=True).mean()\n\n# plot smoothed timeseries\nyr_rol.plot(label='annual',color='red',lw = 2)\nplt.legend();\n\n# fix xlim\nxl = mn_ano.coords['time'].values\nplt.xlim(xl.min(),xl.max())",
"_____no_output_____"
]
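,
[
"# A small follow-up sketch: locate the strongest positive monthly anomaly in\n# the box-averaged series (the 2011 marine heatwave should stand out).\n# Assumes `bavg` from the cell above.\npeak_time = bavg.idxmax('time')\nprint('Largest monthly anomaly: %.2f C on %s' % (float(bavg.max()), str(peak_time.values)[:10]))",
"_____no_output_____"
]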
],
[
[
"The plot shows that monthly variability can be quite important compared to inter-annual variability, hence why smoothing can enhance important inter-annual patterns. \n\n***\n\n## 5) Multi maps\n\nFinally, we give the possibility to the user to make a publication-ready subplot map of a statistic of their data. \n\nThe first step requires the user to define the data to plot. Let's imagine we want to plot monthly SST anomalies that we calculated in part 4, over a specific year. \nOnce the data is ready, we simply have to call `multimap()` function, give it the dimension of the subplot panels and the number of columns we want:\n\n",
"_____no_output_____"
]
],
[
[
"from intake_aodn.plot import multimap\nimport cmocean\nfrom intake_aodn.analysis import time_average, make_clim\n\n# Make monthly anomalies\nda = time_average(data,'M',var='sea_surface_temperature',ignore_inc = True).groupby('time.month') - make_clim(data['sea_surface_temperature'],time_res = 'month')[0]\n\n# data to plot\nda = da.sel(time = slice('2011-01-01','2011-12-31'))\n\nfig = multimap(da,col = 'time',col_wrap=4,freq = 'month')\n",
"_____no_output_____"
]
],
[
[
"\n***\n\n## 6) Saving Figures\n\n*But where is the save button!* \n\nI know, I did not place a save button. But remember, you can always save any image by right-clicking on it. We are working on a browser! \n\nOf course, we might want to save figures at a better quality, especially for publications. In reality, saving a plot is very easy, just insert the one-liner below at the end of any cell, chosing your file name, and format:",
"_____no_output_____"
]
],
[
[
"plt.gcf().savefig(\"filename.changeformathere\");#plt.gcf() indicates you want to save the latest figure.",
"_____no_output_____"
]
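,
[
"# A sketch of a higher-quality export (optional): raise the resolution and trim\n# the surrounding whitespace. The file name and format below are placeholders.\nplt.gcf().savefig(\"my_figure.png\", dpi=300, bbox_inches=\"tight\")",
"_____no_output_____"
]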
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a64ceabe9767d5487286e2fbb50c1966097ca10
| 566,772 |
ipynb
|
Jupyter Notebook
|
2018/tutorials/Tutorial -- Optimization (Lecture 6).ipynb
|
mehrdad-shokri/cs375
|
00554ac497b4a2608ae475099f94ab9635d67b9e
|
[
"MIT"
] | 32 |
2017-09-19T21:40:38.000Z
|
2020-07-26T02:40:23.000Z
|
2018/tutorials/Tutorial -- Optimization (Lecture 6).ipynb
|
mehrdad-shokri/cs375
|
00554ac497b4a2608ae475099f94ab9635d67b9e
|
[
"MIT"
] | null | null | null |
2018/tutorials/Tutorial -- Optimization (Lecture 6).ipynb
|
mehrdad-shokri/cs375
|
00554ac497b4a2608ae475099f94ab9635d67b9e
|
[
"MIT"
] | 17 |
2017-10-04T23:43:56.000Z
|
2019-04-22T16:44:39.000Z
| 148.876281 | 25,776 | 0.864741 |
[
[
[
"\"\"\"The purpose of this tutorial is to introduce you to:\n (1) how gradient-based optimization of neural networks \n operates in concrete practice, and \n (2) how different forms of learning rules lead to more or less \n efficient learning as a function of the shape of the optimization\n landscape \n\nThis tutorial should be used in conjunction with the lecture:\n\n http://cs375.stanford.edu/lectures/lecture6_optimization.pdf\n\n\"\"\";",
"_____no_output_____"
],
[
"%matplotlib inline\n\nimport matplotlib\nimport matplotlib.pyplot as plt\n\n#the above imports the plotting library matplotlib",
"_____no_output_____"
],
[
"#standard imports\nimport time\nimport numpy as np\nimport h5py\n",
"_____no_output_____"
],
[
"#We're not using the GPU here, so we set the \n#\"CUDA_VISIBLE_DEVICES\" environment variable to -1\n#which tells tensorflow to only use the CPU\n\nimport os\nos.environ[\"CUDA_VISIBLE_DEVICES\"]=\"-1\" \nimport tensorflow as tf",
"_____no_output_____"
]
],
[
[
"## Gradient Descent",
"_____no_output_____"
]
],
[
[
"#let's define a model which \"believes\" that the output data\n#is scalar power of a scalar input, e.g. :\n# y ~ x^p\n\n#defining the scalar input data variable \nbatch_size = 200\n\n#the \"placeholder\" mechanis is similar in effect to\n# x = tf.get_variable('x', shape=(batch_size,), dtype=tf.float32)\n#except we don't have to define a fixed name \"x\"\nx = tf.placeholder(shape=(batch_size,), dtype=tf.float32)\n\n#define the scalar power variable\ninitial_power = tf.zeros(shape=())\npower = tf.get_variable('pow', initializer=initial_power, dtype=tf.float32)\n\n#define the model\nmodel = x**power\n\n#the output data needs a variable too\ny = tf.placeholder(shape=(batch_size,), dtype=tf.float32)",
"_____no_output_____"
],
[
"#the error rate of the model is mean L2 distance across \n#the batch of data\npower_loss = tf.reduce_mean((model - y)**2)",
"_____no_output_____"
],
[
"#now, our goal is to use gradient descent to \n#figure out the parameter of our model -- namely, the power variable\ngrad = tf.gradients(power_loss, power)[0]",
"_____no_output_____"
],
[
"#Let's fit (optimize) the model. \n#to do that we'll have to first of course define a tensorflow session\nsess = tf.Session()",
"_____no_output_____"
],
[
"#... and initialize the power variable\ninitializer = tf.global_variables_initializer()\nsess.run(initializer)",
"_____no_output_____"
],
[
"#ok ... so let's test the case where the true input-output relationship\n#is x --> x^2\nxval = np.arange(0, 2, .01)\nyval = np.arange(0, 2, .01)**2",
"_____no_output_____"
],
[
"#OK\ninitial_guess = 0\nassign_op = tf.assign(power, initial_guess)\nsess.run(assign_op)\n\ngradval = sess.run(grad, feed_dict={x: xval, y: yval})",
"_____no_output_____"
],
[
"gradval",
"_____no_output_____"
],
[
"#ok so this is telling us to do:\n\nnew_guess = initial_guess + -1 * (gradval)\nprint(new_guess)",
"1.530632495880127\n"
],
[
"#ok so let's assign the new guess to the power variable\nassign_op = tf.assign(power, new_guess)\nsess.run(assign_op)\n\n#... and get the gradient again\ngradval = sess.run(grad, feed_dict={x: xval, y: yval})\n\ngradval",
"_____no_output_____"
],
[
"new_guess = new_guess + -1 * (gradval)\nprint(new_guess)",
"2.09256654978\n"
],
[
"#... and one more time ...\nassign_op = tf.assign(power, new_guess)\nsess.run(assign_op)\n\n#... get the gradient again\ngradval = sess.run(grad, feed_dict={x: xval, y: yval})\nprint('gradient: %.3f', gradval)\n\n#... do the update\nnew_guess = new_guess + -1 * (gradval)\nprint('power: %.3f', new_guess)\n\n#ok so we're hovering back and forth around guess of 2.... which is right!",
"('gradient: %.3f', 0.17790559)\n('power: %.3f', 1.9146609604358673)\n"
],
[
"#OK let's do this in a real loop and keep track of useful stuff along the way\nxval = np.arange(0, 2, .01)\nyval = np.arange(0, 2, .01)**2\n\n#start the guess off at 0 again\nassign_op = tf.assign(power, 0)\nsess.run(assign_op)\n\n#let's keep track of the guess along the way\npowers = []\n\n#and the loss, which should go down\nlosses = []\n\n#and the grads just for luck\ngrads = []\n\n#let's iterate the gradient descent process 20 timesteps\nnum_iterations = 20\n\n#for each timestep ...\nfor i in range(num_iterations):\n \n #... get the current derivative (grad), the current guess of \"power\"\n #and the loss, given the input and output training data (xval & yval)\n cur_power, cur_loss, gradval = sess.run([power, power_loss, grad], \n feed_dict={x: xval, y: yval})\n\n #... keep track of interesting stuff along the way\n powers.append(cur_power)\n losses.append(cur_loss)\n grads.append(gradval)\n \n #... now do the gradient descent step\n new_power = cur_power - gradval\n \n #... and actually update the value of the power variable\n assign_op = tf.assign(power, new_power)\n sess.run(assign_op)\n \n #and then, the loop runs again\n \nplt.plot(powers, label='estimated power')\nplt.plot(losses, label='loss')\nplt.plot(grads, label='gradients')\nplt.xlabel('iterations')\nplt.legend(loc='lower right')\nplt.title('Estimating a quadratic')",
"_____no_output_____"
],
[
"##ok now let's try that again except where y ~ x^3\n\n#all we need to do is change the data\nxval = np.arange(0, 2, .01)\nyval = np.arange(0, 2, .01)**3\n\n#The rest of the code remains the same\nassign_op = tf.assign(power, 0)\nsess.run(assign_op)\n\npowers = []\nlosses = []\ngrads = []\n\nnum_iterations = 20\nfor i in range(num_iterations): \n cur_power, cur_loss, gradval = sess.run([power, power_loss, grad], \n feed_dict={x: xval, y: yval})\n powers.append(cur_power)\n losses.append(cur_loss)\n grads.append(gradval)\n\n new_power = cur_power - gradval\n\n assign_op = tf.assign(power, new_power)\n sess.run(assign_op)\n \n \nplt.plot(powers, label='estimated power')\nplt.plot(losses, label='loss')\nplt.xlabel('iterations')\nplt.legend(loc='center right')\nplt.title('Failing to estimate a cubic')\n \n#wait ... this did *not* work. why? ",
"_____no_output_____"
],
[
"#whoa ... the loss must have diverged to infinity (or close) really early\nlosses\n#why? ",
"_____no_output_____"
],
[
"#let's look at the gradients\ngrads\n#hm. the gradient was getting big at the end. ",
"_____no_output_____"
],
[
"#after all, the taylor series only works in the close-to-the-value limit. \n#we must have been been taking too big steps. \n#how do we fix this? ",
"_____no_output_____"
]
],
[
[
"### With Learning Rate",
"_____no_output_____"
]
],
[
[
"def gradient_descent(loss, \n target,\n initial_guess,\n learning_rate,\n training_data,\n num_iterations):\n \n #assign initial value to the target\n initial_op = tf.assign(target, initial_guess)\n\n #get the gradient\n grad = tf.gradients(loss, target)[0]\n \n #actually do the gradient descent step directly in tensorflow\n newval = tf.add(target, tf.multiply(-grad, learning_rate))\n \n #the optimizer step actually performs the parameter update\n optimizer_op = tf.assign(target, newval)\n \n #NB: none of the four steps above are actually running anything yet\n #They are just formal graph computations. \n #to actually do anything, you have to run stuff in a session. \n \n #set up containers for stuff we want to keep track of\n targetvals = []\n losses = []\n gradvals = []\n \n #first actually run the initialization operation\n sess.run(initial_op)\n \n #now take gradient steps in a loop\n for i in range(num_iterations):\n #just by virtue of calling \"run\" on the \"optimizer\" op, \n #the optimization occurs ... \n output = sess.run({'opt': optimizer_op,\n 'grad': grad, \n 'target': target,\n 'loss': loss\n }, \n feed_dict=training_data)\n targetvals.append(output['target'])\n losses.append(output['loss'])\n gradvals.append(output['grad'])\n \n return losses, targetvals, gradvals\n ",
"_____no_output_____"
],
[
"xval = np.arange(0, 2, .01)\nyval = np.arange(0, 2, .01)**3\ndata_dict = {x: xval, y:yval}\n\nlosses, powers, grads = gradient_descent(loss=power_loss,\n target=power,\n initial_guess=0,\n learning_rate=.25, #chose learning rate < 1\n training_data=data_dict,\n num_iterations=20)\nplt.plot(powers, label='estimated power')\nplt.plot(losses, label='loss')\nplt.legend(loc='upper right')\nplt.title('Estimating a cubic')\n\n#ok -- now the result stably converges!",
"_____no_output_____"
],
[
"#and also for a higher power .... \nxval = np.arange(0, 2, .01)\nyval = np.arange(0, 2, .01)**4\ndata_dict = {x: xval, y:yval}\n\nlosses, powers, grads = gradient_descent(loss=power_loss,\n target=power,\n initial_guess=0,\n learning_rate=0.1,\n training_data=data_dict,\n num_iterations=100)\nplt.plot(powers, label='estimated power')\nplt.plot(losses, label='loss')\nplt.legend(loc='upper right')\nplt.title('Estimating a quartic')",
"_____no_output_____"
],
[
"#what about when the data is actually not of the right form?\n\nxval = np.arange(0, 2, .01)\nyval = np.sin(xval)\n\ndata_dict = {x: xval, y:yval}\n\nlosses, powers, grads = gradient_descent(loss=power_loss,\n target=power,\n initial_guess=0,\n learning_rate=0.1,\n training_data=data_dict,\n num_iterations=20)\nplt.plot(powers, label='estimated power')\nplt.plot(losses, label='loss')\nplt.legend(loc='center right')\nplt.title('Estimating sine with a power, not converged yet')\n\n#doesn't look like it's converged yet -- maybe we need to run it longer? ",
"_____no_output_____"
],
[
"#sine(x) now with more iterations\n\nxval = np.arange(0, 2, .01)\nyval = np.sin(xval)\n\ndata_dict = {x: xval, y:yval}\n\nlosses, powers, grads = gradient_descent(loss=power_loss,\n target=power,\n initial_guess=0,\n learning_rate=0.1,\n training_data=data_dict,\n num_iterations=100) #<-- more iterations\n\nplt.plot(powers, label='estimated power')\nplt.plot(losses, label='loss')\nplt.legend(loc='center right')\nplt.title('Estimating sine with a power (badly)')\n\n#ok it's converged but not to a great loss. This is unsurprising \n#since x^p is a bad model for sine(x)",
"_____no_output_____"
],
[
"#how should we improve? \n#THE MACHINE LEARNING ANSWER: well, let's have more parameters in our model!\n\n#actually, let's write a model using the Taylor series idea more explicitly:\n# y ~ sum_i a_i x^i \n#for some coefficients a_i that we have to learn",
"_____no_output_____"
],
[
"#let's go out to x^5, so approx_order = 7 (remember, we're 0-indexing in python)\napproximation_order = 6\n\n#ok so now let's define the variabe we'll be using\n#instead of \"power\" this will be coefficients of the powers\n#with one coefficient for each power from 0 to approximation_order-1\ncoefficients = tf.get_variable('coefficients', \n initializer = tf.zeros(shape=(approximation_order,)),\n dtype=tf.float32)",
"_____no_output_____"
],
[
"#gotta run the initializer again b/c we just defined a new trainable variable\ninitializer = tf.global_variables_initializer()\nsess.run(initializer)",
"_____no_output_____"
],
[
"sess.run(coefficients)",
"_____no_output_____"
],
[
"#Ok let's define the model\n\n#here's the vector of exponents\npowervec = tf.range(0, approximation_order, dtype=tf.float32)\n\n#we want to do essentially:\n# sum_i coefficient_i * x^powervec[i]\n#but to do x^powervec, we need to create an additional dimension on x\nx_expanded = tf.expand_dims(x, axis=1)\n\n#ok, now we can actually do x^powervec\nx_exponentiated = x_expanded**powervec\n\n#now multiply by the coefficient variable\nx_multiplied_by_coefficients = coefficients * x_exponentiated\n\n#and add up over the 1st dimension e.g. dong the sum_i\npolynomial_model = tf.reduce_sum(x_multiplied_by_coefficients, axis=1)\n\n#the loss is again l2 difference between prediction and desired output\npolynomial_loss = tf.reduce_mean((polynomial_model - y)**2)",
"_____no_output_____"
],
[
"xval = np.arange(-2, 2, .02)\nyval = np.sin(xval)\n\ndata_dict = {x: xval, y:yval}\n\n#starting out at 0 since the coefficients were all intialized to 0\nsess.run(polynomial_model, feed_dict=data_dict)",
"_____no_output_____"
],
[
"#ok let's try it\nlosses, coefvals, grads = gradient_descent(loss=polynomial_loss,\n target=coefficients,\n initial_guess=np.zeros(approximation_order),\n learning_rate=0.1, \n training_data=data_dict,\n num_iterations=100) \n",
"_____no_output_____"
],
[
"#ok, so for each timstep we have 6 values -- the coefficients\nprint(len(coefvals))\n\ncoefvals[-1].shape",
"100\n"
],
[
"#here's the last set of coefficients learned\ncoefvals[-1]\n#whoa -- what's going on? ",
"_____no_output_____"
],
[
"#let's lower the learning rate\nlosses, coefvals, grads = gradient_descent(loss=polynomial_loss,\n target=coefficients,\n initial_guess=np.zeros(approximation_order),\n learning_rate=0.005, #<-- lowered learning rate\n training_data=data_dict,\n num_iterations=100) \n",
"_____no_output_____"
],
[
"#ok not quite as bad\ncoefvals[-1]",
"_____no_output_____"
],
[
"#let's visualize what we learned\nx0 = coefvals[-1]\nassign_op = tf.assign(coefficients, x0)\nsess.run(assign_op)\n\nplt.plot(xval, yval)\nplt.plot(xval, sess.run(polynomial_model, feed_dict={x:xval}))\n\n\n#ok, fine, but not great",
"_____no_output_____"
],
[
"#what if we let it run longer? \nlosses, coefvals, grads = gradient_descent(loss=polynomial_loss,\n target=coefficients,\n initial_guess=np.zeros(approximation_order),\n learning_rate=0.005, \n training_data=data_dict,\n num_iterations=5000) #<-- more iterations\n\nx0 = coefvals[-1]\nassign_op = tf.assign(coefficients, x0)\nsess.run(assign_op)\n\nplt.figure(figsize=(10, 3))\nplt.subplot(1, 2, 1)\nplt.plot(xval, sess.run(polynomial_model, feed_dict={x:xval}))\nplt.plot(xval, yval)\n\nplt.subplot(1, 2, 2)\nplt.plot(losses)\nplt.xlabel('iterations')\nplt.ylabel('loss')\nplt.title('Loss with Gradient Descent')\n\n\n#ok much better",
"_____no_output_____"
],
[
"coefvals[-1]",
"_____no_output_____"
],
[
"tf.Variable(np.zeros(6))",
"_____no_output_____"
]
],
[
[
"### With momentum",
"_____no_output_____"
]
],
[
[
"def gradient_descent_with_momentum(loss, \n target,\n initial_guess,\n learning_rate,\n momentum,\n training_data,\n num_iterations):\n \n #set target to initial guess\n initial_op = tf.assign(target, initial_guess)\n #get gradient\n grad = tf.gradients(loss, target)[0]\n #set up the variable for the gradient accumulation\n grad_shp = grad.shape.as_list()\n #needs to be specified as float32 to interact properly with other things (but numpy defaults to float64)\n grad_accum = tf.Variable(np.zeros(grad_shp).astype(np.float32))\n #gradplus = grad + momentum * grad_accum\n gradplus = tf.add(grad, tf.multiply(grad_accum, momentum))\n #newval = oldval - learning_rate * gradplus\n newval = tf.add(target, tf.multiply(-gradplus, learning_rate))\n #the optimizer step actually performs the parameter update\n optimizer_op = tf.assign(target, newval)\n #this step updates grad_accum \n update_accum = tf.assign(grad_accum, gradplus)\n \n #run initialization\n sess.run(initial_op)\n #necessary b/c we've defined a new variable (\"grad_accum\") above\n init_op = tf.global_variables_initializer()\n sess.run(init_op)\n \n #run the loop\n targetvals = []\n losses = []\n gradvals = [] \n times = []\n for i in range(num_iterations):\n t0 = time.time()\n output = sess.run({'opt': optimizer_op, #have to have this for optimization to occur\n 'accum': update_accum, #have to have this for grad_accum to update\n 'grad': grad, #the rest of these are just for keeping track\n 'target': target, \n 'loss': loss\n }, \n feed_dict=training_data)\n times.append(time.time() - t0)\n targetvals.append(output['target'])\n losses.append(output['loss'])\n gradvals.append(output['grad'])\n \n print('Average time per iteration --> %.5f' % np.mean(times))\n return losses, targetvals, gradvals\n ",
"_____no_output_____"
],
[
"losses, coefvals, grads = gradient_descent_with_momentum(loss=polynomial_loss,\n target=coefficients,\n initial_guess=np.zeros(approximation_order),\n learning_rate=0.01, #<-- can use higher learning rate!\n momentum=0.9,\n training_data=data_dict,\n num_iterations=250) #<-- can get away from fewer iterations!\n\nx0 = coefvals[-1]\nassign_op = tf.assign(coefficients, x0)\nsess.run(assign_op)\n\nplt.figure(figsize=(10, 3))\nplt.subplot(1, 2, 1)\nplt.plot(xval, sess.run(polynomial_model, feed_dict={x:xval}))\nplt.plot(xval, yval)\n\nplt.subplot(1, 2, 2)\nplt.plot(losses)\nplt.xlabel('iterations')\nplt.ylabel('loss')\nplt.title('Loss with Gradient Descent')\n\n#so momentum is really useful ",
"Average time per iteration --> 0.00134\n"
]
],
[
[
"### Tensorflow's Built-In Optimizers",
"_____no_output_____"
]
],
[
[
"def tf_builtin_optimization(loss,\n optimizer_class,\n target,\n training_data,\n num_iterations,\n optimizer_args=(),\n optimizer_kwargs={},\n ):\n \n #construct the optimizer\n optimizer = optimizer_class(*optimizer_args, \n **optimizer_kwargs)\n #formal tensorflow optimizers will always have a \"minimize\" method\n #this is how you actually get the optimizer op\n optimizer_op = optimizer.minimize(loss)\n \n init_op = tf.global_variables_initializer()\n sess.run(init_op)\n\n targetvals = []\n losses = []\n times = []\n for i in range(num_iterations):\n t0 = time.time()\n output = sess.run({'opt': optimizer_op,\n 'target': target,\n 'loss': loss}, \n feed_dict=training_data)\n times.append(time.time() - t0)\n targetvals.append(output['target'])\n losses.append(output['loss'])\n \n print('Average time per iteration --> %.5f' % np.mean(times))\n return np.array(losses), targetvals",
"_____no_output_____"
],
[
"xval = np.arange(-2, 2, .02)\nyval = np.sin(xval)\n\ndata_dict = {x: xval, y:yval}\n\nlosses, coefvals = tf_builtin_optimization(loss=polynomial_loss,\n optimizer_class=tf.train.GradientDescentOptimizer,\n target=coefficients,\n training_data=data_dict,\n num_iterations=5000,\n optimizer_args=(0.005,),\n ) #<-- more iterations\n\nx0 = coefvals[-1]\nassign_op = tf.assign(coefficients, x0)\nsess.run(assign_op)\n\nplt.figure(figsize=(10, 3))\nplt.subplot(1, 2, 1)\nplt.plot(xval, sess.run(polynomial_model, feed_dict={x:xval}))\nplt.plot(xval, yval)\n\nplt.subplot(1, 2, 2)\nplt.plot(losses)\nplt.xlabel('iterations')\nplt.ylabel('loss')\nplt.title('Loss with Gradient Descent')\n\n\n#right ok, we recovered what we did before by hand, now using\n#the standard tensorflow tools",
"Average time per iteration --> 0.00125\n"
],
[
"#Let's use the Momentum Optimizer. standard parameters for learning \n#are learning_rate = 0.01 and momentum = 0.9\n\nxval = np.arange(-2, 2, .02)\nyval = np.sin(xval )\ndata_dict = {x: xval, y:yval}\n\nlosses, coefvals = tf_builtin_optimization(loss=polynomial_loss,\n optimizer_class=tf.train.MomentumOptimizer,\n target=coefficients,\n training_data=data_dict,\n num_iterations=250, \n optimizer_kwargs={'learning_rate': 0.01,\n 'momentum': 0.9}) \nx0 = coefvals[-1]\nassign_op = tf.assign(coefficients, x0)\nsess.run(assign_op)\n\nplt.figure(figsize=(10, 3))\nplt.subplot(1, 2, 1)\nplt.plot(xval, sess.run(polynomial_model, feed_dict={x:xval}))\nplt.plot(xval, yval)\n\nplt.subplot(1, 2, 2)\nplt.plot(losses)\nplt.xlabel('iterations')\nplt.ylabel('loss')\nplt.title('Loss with Momentum Optimizer')\n\n#again reproducing what we see before by hand",
"Average time per iteration --> 0.00133\n"
],
[
"#and we can try some other stuff, such as the Adam Optimizer\n\nlosses, coefvals = tf_builtin_optimization(loss=polynomial_loss,\n optimizer_class=tf.train.AdamOptimizer,\n target=coefficients,\n training_data=data_dict,\n num_iterations=500,\n optimizer_kwargs={'learning_rate': 0.01}) \n \nx0 = coefvals[-1]\nassign_op = tf.assign(coefficients, x0)\nsess.run(assign_op)\n\nplt.figure(figsize=(10, 3))\nplt.subplot(1, 2, 1)\nplt.plot(xval, sess.run(polynomial_model, feed_dict={x:xval}))\nplt.plot(xval, yval)\n\nplt.subplot(1, 2, 2)\nplt.plot(losses)\nplt.xlabel('iterations')\nplt.ylabel('loss')\nplt.title('Loss with Adam optimizer')\n\n#Adam as usualy requires a bit more steps than Momentum -- but the advantage of Adam\n#is that sometimes Momentum blows up and Adam is usually more stable\n#(compare the loss traces! even though Momentum didn't below up above, it's\n#loss is much more jaggedy -- signs up potential blowup)",
"Average time per iteration --> 0.00146\n"
],
[
"#so hm ... maybe because Adam is more stable we can jack up the \n#initial learning rate and thus converge even faster than with Momentum\n\nlosses, coefvals = tf_builtin_optimization(loss=polynomial_loss,\n optimizer_class=tf.train.AdamOptimizer,\n target=coefficients,\n training_data=data_dict,\n num_iterations=150,\n optimizer_kwargs={'learning_rate': 0.5}) \n \nx0 = coefvals[-1]\nassign_op = tf.assign(coefficients, x0)\nsess.run(assign_op)\n\nplt.figure(figsize=(10, 3))\nplt.subplot(1, 2, 1)\nplt.plot(xval, sess.run(polynomial_model, feed_dict={x:xval}))\nplt.plot(xval, yval)\n\nplt.subplot(1, 2, 2)\nplt.plot(losses)\nplt.xlabel('iterations')\nplt.ylabel('loss')\nplt.title('Loss with Adam optimizer\\nhigh initial learning rate')\n\n#indeed we can!",
"Average time per iteration --> 0.00169\n"
]
],
[
[
"### Newton's Method (Second Order)",
"_____no_output_____"
]
],
[
[
"def newtons_method(loss, \n target,\n initial_guess,\n training_data,\n num_iterations,\n grad2clip=1.):\n \n #create initialization operation\n initial_op = tf.assign(target, initial_guess) \n\n grad = tf.gradients(loss, target)[0]\n #to actually compute the second order correction \n #we split the one-variable and multi-variable cases up -- for ease of working\n \n if len(target.shape) == 0: #one-variable case\n #actually get the second derivative\n grad2 = tf.gradients(grad, target)[0]\n \n #now morally we want to compute:\n # newval = target - grad / grad2\n #BUT there is often numerical instability caused by dividing\n #by grad2 if grad2 is small... so we have to clip grad2 by a clip value\n clippedgrad2 = tf.maximum(grad2, grad2clip)\n \n #and now we can do the newton's formula update\n newval = tf.add(target, -tf.divide(grad, clippedgrad2))\n else:\n #in the multi-variable case, we first compute the hessian matrix\n #thank gosh tensorflow has this built in finally!\n hess = tf.hessians(loss, target)[0]\n #now we take it's inverse\n hess_inv = tf.matrix_inverse(hess)\n #now we get H^{-1} grad, e.g. multiple the matrix by the vector\n hess_inv_grad = tf.tensordot(hess_inv, grad, 1)\n #again we have to clip for numerical stability\n hess_inv_grad = tf.clip_by_value(hess_inv_grad, -grad2clip, grad2clip)\n #and get the new value for the parameters\n newval = tf.add(target, -hess_inv_grad)\n \n #the rest of the code is just as in the gradient descent case\n optimizer_op = tf.assign(target, newval) \n targetvals = []\n losses = []\n gradvals = []\n sess.run(initial_op)\n for i in range(num_iterations): \n output = sess.run({'opt': optimizer_op,\n 'grad': grad, \n 'target': target,\n 'loss': loss}, \n feed_dict=training_data)\n targetvals.append(output['target'])\n losses.append(output['loss'])\n gradvals.append(output['grad'])\n return losses, targetvals, gradvals\n",
"_____no_output_____"
],
[
"xval = np.arange(0, 2, .01)\nyval = np.arange(0, 2, .01)**2\ndata_dict = {x: xval, y:yval}\n\nlosses, powers, grads = newtons_method(loss=power_loss,\n target=power,\n initial_guess=0,\n training_data=data_dict,\n num_iterations=20,\n grad2clip=1)\n\n\nplt.plot(powers, label='estimated power')\nplt.plot(losses, label='loss')\nplt.legend(loc='upper right')\nplt.title(\"Newton's Method on Quadractic\")\n\n#whoa -- much faster than before",
"_____no_output_____"
],
[
"xval = np.arange(0, 2, .01)\nyval = np.arange(0, 2, .01)**3\ndata_dict = {x: xval, y:yval}\n\nlosses, powers, grads = newtons_method(loss=power_loss,\n target=power,\n initial_guess=0,\n training_data=data_dict,\n num_iterations=20,\n grad2clip=1)\n\n\nplt.plot(powers, label='estimated power')\nplt.plot(losses, label='loss')\nplt.legend(loc='upper right')\nplt.title(\"Newton's Method on a Cubic\")\n",
"_____no_output_____"
],
[
"xval = np.arange(-2, 2, .02)\nyval = np.sin(xval)\n\ndata_dict = {x: xval, y:yval}\n\nlosses, coefvals, grads = newtons_method(loss=polynomial_loss,\n target=coefficients,\n initial_guess=np.zeros(approximation_order),\n training_data=data_dict,\n num_iterations=2)\n\nx0 = coefvals[-1]\nassign_op = tf.assign(coefficients, x0)\nsess.run(assign_op)\n\nplt.figure(figsize=(10, 3))\nplt.subplot(1, 2, 1)\nplt.plot(xval, yval)\nplt.plot(xval, sess.run(polynomial_model, feed_dict={x:xval}))\n\n\nplt.subplot(1, 2, 2)\nplt.plot(losses)\nplt.xlabel('iterations')\nplt.ylabel('loss')\n\n#no joke -- the error goes to 0 after 1 update step\n",
"_____no_output_____"
],
[
"#let's try something a little more complicated \n\nxval = np.arange(-2, 2, .02)\nyval = np.cos(2 * xval) + np.sin(xval + 1)\n\ndata_dict = {x: xval, y:yval}\n\nlosses, coefvals, grads = newtons_method(loss=polynomial_loss,\n target=coefficients,\n initial_guess=np.zeros(approximation_order),\n training_data=data_dict,\n num_iterations=5)\n\nx0 = coefvals[-1]\nassign_op = tf.assign(coefficients, x0)\nsess.run(assign_op)\n\nplt.figure(figsize=(10, 3))\nplt.subplot(1, 2, 1)\nplt.plot(xval, yval)\nplt.plot(xval, sess.run(polynomial_model, feed_dict={x:xval}))\n\n\nplt.subplot(1, 2, 2)\nplt.plot(losses)\nplt.xlabel('iterations')\nplt.ylabel('loss')\n\n#really fast -- actually Newton's method always converges this fast if\n#the model is polynomial\n",
"_____no_output_____"
],
[
"#just to put the above in context, let's compare to momentum\n\nxval = np.arange(-2, 2, .02)\nyval = np.cos(2 * xval) + np.sin(xval + 1)\n\ndata_dict = {x: xval, y:yval}\n\nlosses, coefvals = tf_builtin_optimization(loss=polynomial_loss,\n optimizer_class=tf.train.MomentumOptimizer,\n target=coefficients,\n training_data=data_dict,\n num_iterations=200,\n optimizer_kwargs={'learning_rate': 0.01,\n 'momentum': 0.9},\n ) \nx0 = coefvals[-1]\nassign_op = tf.assign(coefficients, x0)\nsess.run(assign_op)\n\nplt.figure(figsize=(10, 3))\nplt.subplot(1, 2, 1)\nplt.plot(xval, yval)\nplt.plot(xval, sess.run(polynomial_model, feed_dict={x:xval}))\n\n\nplt.subplot(1, 2, 2)\nplt.plot(losses)\nplt.xlabel('iterations')\nplt.ylabel('loss')",
"Average time per iteration --> 0.00107\n"
]
],
[
[
"### Using External Optimizers",
"_____no_output_____"
]
],
[
[
"#actually, let's use an *external* optimizer -- not do \n#the optimization itself in tensorflow\nfrom scipy.optimize import minimize\n\n#you can see all the methods for optimization here: \n# https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.minimize.html#scipy.optimize.minimize",
"_____no_output_____"
],
[
"#Ok here's the model we want to learn\nxval = np.arange(-2, 2, .02)\nyval = np.cosh(2 * xval) + np.sin(xval + 1)\n\nplt.plot(xval, yval)\nplt.title(\"Target to Learn\")",
"_____no_output_____"
],
[
"polynomial_loss",
"_____no_output_____"
],
[
"#we need to make a python function from our tensorflow model\n#(actually we could simply write the model directly in numpy\n#but ... since we already have it in Tensorflow might as well use it\n\ndef func_loss(vals):\n data_dict = {x: xval, \n y: yval,\n coefficients: vals}\n lossval = sess.run(polynomial_loss, feed_dict=data_dict)\n losses.append(lossval)\n return lossval",
"_____no_output_____"
],
[
"#Ok, so let's use a method that doesn't care about the derivative\n#specifically \"Nelder-Mead\" -- this is a simplex-based method\n\nlosses = []\nresult = minimize(func_loss, \n x0=np.zeros(6),\n method='Nelder-Mead')\n\nx0 = result.x\nassign_op = tf.assign(coefficients, x0)\nsess.run(assign_op)\n\nplt.figure(figsize=(10, 3))\nplt.subplot(1, 2, 1)\nplt.plot(xval, yval, label='True')\nplt.plot(xval, sess.run(polynomial_model, feed_dict={x:xval}), label='Appox.')\nplt.legend(loc='upper center')\n\nplt.subplot(1, 2, 2)\nplt.plot(losses)\nplt.xlabel('iterations')\nplt.ylabel('loss')\nplt.title('Loss with Nelder-Mead')\n",
"_____no_output_____"
],
[
"#OK now let's try a method that *does* care about the derivative\n#specifically, a method called L-BFGS -- this is basically \n#an approximate version of the newton's method. \n#It's called a \"quasi-second-order\" method because it uses only \n#first derivatives to get an approximation to the second derivative\n\n#to use it, we need *do* need to calculate the derivative\n#... and here's why tensorflow STILL matters even if we're using \n#an external optimizer\n\npolynomial_grad = tf.gradients(polynomial_loss, coefficients)[0]",
"_____no_output_____"
],
[
"#we need to create a function that returns loss and loss derivative\ndef func_loss_with_grad(vals):\n data_dict = {x: xval, \n y:yval,\n coefficients: vals}\n lossval, g = sess.run([polynomial_loss, polynomial_grad], \n feed_dict=data_dict)\n losses.append(lossval)\n return lossval, g.astype(np.float64)",
"_____no_output_____"
],
[
"#Ok, so let's see what happens with L-BFGS\nlosses = []\nresult = minimize(func_loss_with_grad, \n x0=np.zeros(6),\n method='L-BFGS-B', #approximation of newton's method\n jac=True #<-- meaning, we're telling minimizer \n #to use the derivative info -- the so-called\n #\"jacobian\"\n )\n\nx0 = result.x\nassign_op = tf.assign(coefficients, x0)\nsess.run(assign_op)\n\nplt.figure(figsize=(10, 3))\nplt.subplot(1, 2, 1)\nplt.plot(xval, yval, label='True')\nplt.plot(xval, sess.run(polynomial_model, feed_dict={x:xval}), label='Appox.')\nplt.legend(loc='upper center')\n\nplt.subplot(1, 2, 2)\nplt.plot(losses)\nplt.xlabel('iterations')\nplt.ylabel('loss')\nplt.title('Loss with L-BFGS')\n\n#substantially better than the non-derivative-based method\n#-- fewer interations are needed, loss curve is stabler, and final \n#results are better\n",
"_____no_output_____"
]
],
[
[
"## Deploying it in a real case",
"_____no_output_____"
]
],
[
[
"#ok let's load the neural data \nDATA_PATH = \"/home/chengxuz/Class/psych253_2018/data/ventral_neural_data.hdf5\"\nVentral_Dataset = h5py.File(DATA_PATH)\n\ncategories = Ventral_Dataset['image_meta']['category'][:] #array of category labels for all images --> shape == (5760,)\nunique_categories = np.unique(categories) #array of unique category labels --> shape == (8,)\n\nvar_levels = Ventral_Dataset['image_meta']['variation_level'][:] \n\nNeural_Data = Ventral_Dataset['time_averaged_trial_averaged'][:]\n\nnum_neurons = Neural_Data.shape[1]\nnum_categories = 8 ",
"_____no_output_____"
],
[
"categories[:10]",
"_____no_output_____"
],
[
"#we'll construct 8 one-vs-all vectors with {-1, 1} values\ncategory_matrix = np.array([2 * (categories == c) - 1 for \n c in unique_categories]).T.astype(int)",
"_____no_output_____"
],
[
"category_matrix[0]",
"_____no_output_____"
],
[
"sess = tf.Session()",
"_____no_output_____"
],
[
"#first, get initializers for W and b\ninitial_weights = tf.random_uniform(shape=(num_neurons, num_categories),\n minval=-1,\n maxval=1,\n seed=0)\n\ninitial_bias = tf.zeros(shape=(num_categories,))",
"_____no_output_____"
],
[
"#now construct the TF variables\nweights = tf.get_variable('weights', \n dtype=tf.float32,\n initializer=initial_weights)\n \nbias = tf.get_variable('bias', \n dtype=tf.float32,\n initializer=initial_bias)#initialize variables\ninit_op = tf.global_variables_initializer()\nsess.run(init_op)",
"_____no_output_____"
],
[
"#input slots for data and labels\n#note the batch size is \"None\" -- effectively meaning batches of \n#varying sizes can be used\nneural_data = tf.placeholder(shape=(None, num_neurons),\n dtype=tf.float32)\n\ncategory_labels = tf.placeholder(shape=(None, num_categories),\n dtype=tf.float32)",
"_____no_output_____"
],
[
"#now construct margins\nmargins = tf.matmul(neural_data, weights) + bias\n\n#the hinge loss\nhinge_loss = tf.maximum(0., 1. - category_labels * margins)\n\n#and take the mean of the loss over the batch\nhinge_loss_mean = tf.reduce_mean(hinge_loss)",
"_____no_output_____"
],
[
"#simple interface for using tensorflow built-in optimizer\n#as seen yesterclass\n\ndef tf_optimize(loss,\n optimizer_class,\n target,\n training_data,\n num_iterations,\n optimizer_args=(),\n optimizer_kwargs=None,\n sess=None,\n initial_guesses=None):\n \n if sess is None:\n sess = tf.Session()\n \n if optimizer_kwargs is None:\n optimizer_kwargs = {}\n \n #construct the optimizer\n optimizer = optimizer_class(*optimizer_args, \n **optimizer_kwargs)\n optimizer_op = optimizer.minimize(loss)\n \n #initialize variables\n init_op = tf.global_variables_initializer()\n sess.run(init_op)\n \n if initial_guesses is not None:\n for k, v in initial_guesses.items():\n op = tf.assign(k, v)\n sess.run(op)\n\n targetvals = []\n losses = []\n times = []\n for i in range(num_iterations):\n t0 = time.time()\n output = sess.run({'opt': optimizer_op,\n 'target': target,\n 'loss': loss}, \n feed_dict=training_data)\n times.append(time.time() - t0)\n targetvals.append(output['target'])\n losses.append(output['loss'])\n \n print('Average time per iteration --> %.5f' % np.mean(times))\n return np.array(losses), targetvals",
"_____no_output_____"
],
[
"#let's just focus on one batch of data for the moment\n\nbatch_size = 640\ndata_batch = Neural_Data[0: batch_size]\nlabel_batch = category_matrix[0: batch_size]\n\ndata_dict = {neural_data: data_batch,\n category_labels: label_batch}",
"_____no_output_____"
],
[
"#let's look at the weights and biases before training\nweight_vals, bias_vals = sess.run([weights, bias])\n\n#right, it's num_neurons x num_categories\nprint('weights shape:', weight_vals.shape)\n\n#let's look at some of the weights\nplt.hist(weight_vals[:, 0])\nplt.xlabel('Weight Value')\nplt.ylabel('Neuron Count')\nplt.title('Weights for Animals vs All')\n\nprint('biases:', bias_vals)",
"('weights shape:', (296, 8))\n('biases:', array([ 0., 0., 0., 0., 0., 0., 0., 0.], dtype=float32))\n"
],
[
"#ok so we'll use the Momentum optimizer to find weights and bias\n#for this classification problem\nlosses, targs = tf_optimize(loss=hinge_loss_mean,\n optimizer_class=tf.train.MomentumOptimizer,\n target=[],\n training_data=data_dict,\n num_iterations=100,\n optimizer_kwargs={'learning_rate': 1, 'momentum': 0.9},\n sess=sess)",
"Average time per iteration --> 0.00345\n"
],
[
"#losses decrease almost to 0\nplt.plot(losses)",
"_____no_output_____"
],
[
"weight_vals, bias_vals = sess.run([weights, bias])\n\n#right, it's num_neurons x num_categories\nweight_vals.shape\n\n#let's look at some of the weights\nplt.hist(weight_vals[:, 2])\nplt.xlabel('Weight Value')\nplt.ylabel('Neuron Count')\nplt.title('Weights for Faces vs All')\n\nprint('biases:', bias_vals)\n\n#ok so things have been learned!",
"('biases:', array([-5.6579237 , -5.38999081, -6.01903248, -5.87763357, -5.83938694,\n -5.5426507 , -5.03545904, -5.47604179], dtype=float32))\n"
],
[
"#how good are the results on training? \n\n#actually get the predictions by first getting the margins\nmargin_vals = sess.run(margins, feed_dict = data_dict)\n#now taking the argmax across categories\npred_inds = margin_vals.argmax(axis=1)\n#compare prediction to actual\ncorrect = pred_inds == label_batch.argmax(axis=1)\npct = correct.sum() / float(len(correct)) * 100\nprint('Training accuracy: %.2f%%' % pct)\n\n#Right, very accurate on training",
"Training accuracy: 99.53%\n"
]
],
[
[
"### Stochastic Gradient Descent",
"_____no_output_____"
]
],
[
[
"class BatchReader(object):\n \n def __init__(self, data_dict, batch_size, shuffle=True, shuffle_seed=0, pad=True):\n self.data_dict = data_dict \n self.batch_size = batch_size\n _k = data_dict.keys()[0]\n self.data_length = data_dict[_k].shape[0]\n self.total_batches = (self.data_length - 1) // self.batch_size + 1\n self.curr_batch_num = 0\n self.curr_epoch = 1\n self.pad = pad\n self.shuffle = shuffle\n self.shuffle_seed = shuffle_seed\n \n if self.shuffle:\n self.rng = np.random.RandomState(seed=self.shuffle_seed)\n self.perm = self.rng.permutation(self.data_length)\n \n def __iter__(self):\n return self\n\n def next(self):\n return self.get_next_batch()\n \n def get_next_batch(self):\n data = self.get_batch(self.curr_batch_num)\n self.increment_batch_num()\n return data\n\n def increment_batch_num(self):\n m = self.total_batches\n if (self.curr_batch_num >= m - 1):\n self.curr_epoch += 1\n if self.shuffle:\n self.perm = self.rng.permutation(self.data_length)\n self.curr_batch_num = (self.curr_batch_num + 1) % m\n\n def get_batch(self, cbn):\n data = {}\n startv = cbn * self.batch_size\n endv = (cbn + 1) * self.batch_size\n if self.pad and endv > self.data_length:\n startv = self.data_length - self.batch_size\n endv = startv + self.batch_size\n for k in self.data_dict:\n if self.shuffle:\n data[k] = self.data_dict[k][self.perm[startv: endv]]\n else:\n data[k] = self.data_dict[k][startv: endv]\n return data\n",
"_____no_output_____"
],
[
"class TF_Optimizer(object):\n \"\"\"Make the tensorflow SGD-style optimizer into a scikit-learn compatible class\n Uses BatchReader for stochastically getting data batches.\n \n model_func: function which returns tensorflow nodes for\n predictions, data_input\n \n loss_func: function which takes model_func prediction output node and \n returns tensorflow nodes for\n loss, label_input\n \n optimizer_class: which tensorflow optimizer class to when learning the model parameters\n \n batch_size: which batch size to use in training\n \n train_iterations: how many iterations to run the optimizer for \n --> this should really be picked automatically by like when the training\n error plateaus\n \n model_kwargs: dictionary of additional arguments for the model_func\n \n loss_kwargs: dictionary of additional arguments for the loss_func\n \n optimizer_args, optimizer_kwargs: additional position and keyword args for the\n optimizer class\n \n sess: tf session to use (will be constructed if not passed) \n \n train_shuffle: whether to shuffle example order during training\n \n \"\"\" \n \n def __init__(self, \n model_func,\n loss_func, \n optimizer_class,\n batch_size,\n train_iterations,\n model_kwargs=None,\n loss_kwargs=None,\n optimizer_args=(),\n optimizer_kwargs=None,\n sess=None,\n train_shuffle=False\n ):\n \n self.model_func = model_func\n if model_kwargs is None:\n model_kwargs = {}\n self.model_kwargs = model_kwargs\n self.loss_func = loss_func\n if loss_kwargs is None:\n loss_kwargs = {}\n self.loss_kwargs = loss_kwargs\n self.train_shuffle=train_shuffle\n \n self.train_iterations = train_iterations\n self.batch_size = batch_size\n \n if sess is None:\n sess = tf.Session()\n self.sess = sess\n \n if optimizer_kwargs is None:\n optimizer_kwargs = {}\n self.optimizer = optimizer_class(*optimizer_args, \n **optimizer_kwargs)\n \n def fit(self, train_data, train_labels):\n self.model, self.data_holder = self.model_func(**self.model_kwargs)\n self.loss, self.labels_holder = self.loss_func(self.model, **self.loss_kwargs)\n \n self.optimizer_op = self.optimizer.minimize(self.loss)\n \n data_dict = {self.data_holder: train_data,\n self.labels_holder: train_labels}\n train_data = BatchReader(data_dict=data_dict,\n batch_size=self.batch_size,\n shuffle=self.train_shuffle,\n shuffle_seed=0,\n pad=True)\n \n init_op = tf.global_variables_initializer()\n sess.run(init_op)\n \n self.losses = [] \n for i in range(self.train_iterations):\n data_batch = train_data.next()\n output = self.sess.run({'opt': self.optimizer_op,\n 'loss': self.loss}, \n feed_dict=data_batch)\n self.losses.append(output['loss'])\n \n def predict(self, test_data):\n data_dict = {self.data_holder: test_data}\n test_data = BatchReader(data_dict=data_dict,\n batch_size=self.batch_size,\n shuffle=False,\n pad=False) \n preds = []\n for i in range(test_data.total_batches):\n data_batch = test_data.get_batch(i)\n pred_batch = self.sess.run(self.model, feed_dict=data_batch)\n preds.append(pred_batch)\n return np.row_stack(preds)\n",
"_____no_output_____"
],
[
"def binarize_labels(labels):\n \"\"\"takes discrete-valued labels and binarizes them into {-1, 1}-value format\n returns:\n binarized_labels: of shape (num_stimuli, num_categories)\n unique_labels: actual labels indicating order of first axis in binarized_labels\n \"\"\"\n unique_labels = np.unique(labels)\n num_classes = len(unique_labels)\n binarized_labels = np.array([2 * (labels == c) - 1 for \n c in unique_labels]).T.astype(int)\n return binarized_labels, unique_labels\n \n\nclass TF_OVA_Classifier(TF_Optimizer):\n \"\"\"\n Subclass of TFOptimizer for use with categorizers. Basically, this class \n handles data binarization (in the fit method) and un-binarization \n (in the predict method), so that we can use the class with the function:\n \n train_and_test_scikit_classifier\n \n that we've previously defined. \n \n The predict method here implements a one-vs-all approach for multi-class problems.\n \"\"\"\n \n def fit(self, train_data, train_labels):\n #binarize labels\n num_features = train_data.shape[1]\n binarized_labels, classes_ = binarize_labels(train_labels)\n #set .classes_ attribute, since this is needed by train_and_test_scikit_classifier\n self.classes_ = classes_\n num_classes = len(classes_)\n #pass number of features and classes to the model construction \n #function that will be called when the fit method is called\n self.model_kwargs['num_features'] = num_features\n self.model_kwargs['num_classes'] = num_classes\n #now actually call the optimizer fit method\n TF_Optimizer.fit(self, train_data=train_data, \n train_labels=binarized_labels)\n \n def decision_function(self, test_data):\n #returns what are effectively the margins (for a linear classifier)\n return TF_Optimizer.predict(self, test_data)\n \n def predict(self, test_data):\n #use the one-vs-all rule for multiclass prediction. \n preds = self.decision_function(test_data)\n preds = np.argmax(preds, axis=1)\n classes_ = self.classes_\n return classes_[preds]",
"_____no_output_____"
],
[
"def linear_classifier(num_features, num_classes):\n \"\"\"generic form of a linear classifier, e.g. the model\n margins = np.dot(data, weight) + bias\n \"\"\"\n initial_weights = tf.zeros(shape=(num_features, \n num_classes), \n dtype=tf.float32)\n weights = tf.Variable(initial_weights, \n dtype=tf.float32,\n name='weights')\n initial_bias = tf.zeros(shape=(num_classes,))\n bias = tf.Variable(initial_bias, \n dtype=tf.float32,\n name='bias')\n \n data = tf.placeholder(shape=(None, num_features), dtype=tf.float32, name='data')\n margins = tf.add(tf.matmul(data, weights), bias, name='margins')\n return margins, data\n\n\ndef hinge_loss(margins):\n \"\"\"standard SVM hinge loss\n \"\"\"\n num_classes = margins.shape.as_list()[1]\n category_labels = tf.placeholder(shape=(None, num_classes),\n dtype=tf.float32, \n name='labels')\n h = tf.maximum(0., 1. - category_labels * margins, name='hinge_loss')\n hinge_loss_mean = tf.reduce_mean(h, name='hinge_loss_mean')\n return hinge_loss_mean, category_labels",
"_____no_output_____"
],
[
"#construct the classifier instance ... just like with scikit-learn\ncls = TF_OVA_Classifier(model_func=linear_classifier,\n loss_func=hinge_loss, \n batch_size=2500,\n train_iterations=1000,\n train_shuffle=True,\n optimizer_class=tf.train.MomentumOptimizer,\n optimizer_kwargs = {'learning_rate':10.,\n 'momentum': 0.99\n },\n sess=sess\n )",
"_____no_output_____"
],
[
"#ok let's try out our classifier on medium-variation data\ndata_subset = Neural_Data[var_levels=='V3']\ncategories_subset = categories[var_levels=='V3']\n\ncls.fit(data_subset, categories_subset)\n\nplt.plot(cls.losses)\nplt.xlabel('number of iterations')\nplt.ylabel('Hinge loss')",
"_____no_output_____"
],
[
"#ok how good was the actual training accuracy? \npreds = cls.predict(data_subset)\nacc = (preds == categories_subset).sum()\npct = acc / float(len(preds)) * 100\n\nprint('Training accuracy was %.2f%%' % pct)",
"Training accuracy was 99.77%\n"
]
],
[
[
"#### Side note on getting relevant tensors",
"_____no_output_____"
]
],
[
[
"#here's the linear mode constructed above:\nlin_model = cls.model\nprint(lin_model)",
"Tensor(\"margins:0\", shape=(?, 8), dtype=float32)\n"
],
[
"#suppose we want to access the weights / bias used in this model?\n#these can be accessed by the \"op.inputs\" attribute in TF\n\n#first, we see that this is the stage of the caluation\n#where the linear model (the margins) is put together by adding \n#the result of the matrix multiplication (\"MatMul_[somenumber]\")\n#to the bias\nlist(lin_model.op.inputs)",
"_____no_output_____"
],
[
"#so bias is just the first of these inputs\nbias_tensor = lin_model.op.inputs[1]\nbias_tensor",
"_____no_output_____"
],
[
"#if we follow up the calculation graph by taking apart\n#whatever was the inputs to the matmul stage, we see\n#the data and the weights\nmatmul_tensor = lin_model.op.inputs[0]\nlist(matmul_tensor.op.inputs)",
"_____no_output_____"
],
[
"#so the weights tensor is just the first of *these* inputs\nweights_tensor = matmul_tensor.op.inputs[1]\nweights_tensor",
"_____no_output_____"
],
[
"#putting this together, we could have done:\nweights_tensor = lin_model.op.inputs[0].op.inputs[1]\nweights_tensor",
"_____no_output_____"
]
],
[
[
"#### Regularization",
"_____no_output_____"
]
],
[
[
"#we can define other loss functions -- such as L2 regularization\n\ndef hinge_loss_l2reg(margins, C, square=False):\n #starts off the same as regular hinge loss\n num_classes = margins.shape.as_list()[1]\n category_labels = tf.placeholder(shape=(None, num_classes),\n dtype=tf.float32, \n name='labels')\n h = tf.maximum(0., 1 - category_labels * margins)\n #allows for squaring the hinge_loss optionally, as done in sklearn\n if square:\n h = h**2\n hinge_loss = tf.reduce_mean(h)\n \n #but how let's get the weights from the margins,\n #using the method just explored above\n weights = margins.op.inputs[0].op.inputs[1]\n #and get sum-square of the weights -- the 0.5 is for historical reasons\n reg_loss = 0.5*tf.reduce_mean(weights**2) \n \n #total up the loss from the two terms with constant C for weighting \n total_loss = C * hinge_loss + reg_loss\n \n return total_loss, category_labels",
"_____no_output_____"
],
[
"cls = TF_OVA_Classifier(model_func=linear_classifier,\n loss_func=hinge_loss_l2reg, \n loss_kwargs={'C':1},\n batch_size=2500,\n train_iterations=1000,\n train_shuffle=True,\n optimizer_class=tf.train.MomentumOptimizer,\n optimizer_kwargs = {'learning_rate':10.,\n 'momentum': 0.99\n },\n sess=sess,\n )",
"_____no_output_____"
],
[
"data_subset = Neural_Data[var_levels=='V3']\ncategories_subset = categories[var_levels=='V3']\n\ncls.fit(data_subset, categories_subset)\n\nplt.plot(cls.losses)\nplt.xlabel('number of iterations')\nplt.ylabel('Regularized Hinge loss')\n\npreds = cls.predict(data_subset)\nacc = (preds == categories_subset).sum()\npct = acc / float(len(preds)) * 100\n\nprint('Regularized training accuracy was %.2f%%' % pct)\n\n#unsuprisingly training accuracy goes down a bit with regularization\n#compared to before w/o regularization",
"Regularized training accuracy was 93.87%\n"
]
],
[
[
"### Integrating with cross validation tools",
"_____no_output_____"
]
],
[
[
"import cross_validation as cv",
"_____no_output_____"
],
[
"meta_array = np.core.records.fromarrays(Ventral_Dataset['image_meta'].values(),\n names=Ventral_Dataset['image_meta'].keys())",
"_____no_output_____"
],
[
"#the whole point of creating the TF_OVA_Classifier above\n#was that we could simply stick it into the cross-validation regime\n#that we'd previously set up for scikit-learn style classifiers\n#so now let's test it out\n\n#create some train/test splits\nsplits = cv.get_splits(meta_array, \n lambda x: x['object_name'], #we're balancing splits by object\n 5, \n 5, \n 35, \n train_filter=lambda x: (x['variation_level'] == 'V3'),\n test_filter=lambda x: (x['variation_level'] == 'V3'),)\n \n \n#here are the arguments to the classifier\nmodel_args = {'model_func': linear_classifier,\n 'loss_func': hinge_loss_l2reg, \n 'loss_kwargs': {'C':5e-2, #<-- a good regularization value\n }, \n 'batch_size': 2500,\n 'train_iterations': 1000, #<-- about the right number of steps\n 'train_shuffle': True,\n 'optimizer_class':tf.train.MomentumOptimizer,\n 'optimizer_kwargs': {'learning_rate':.1,\n 'momentum': 0.9},\n 'sess': sess}\n\n#so now it should work just like before\nres = cv.train_and_test_scikit_classifier(features=Neural_Data,\n labels=categories,\n splits=splits,\n model_class=TF_OVA_Classifier,\n model_args=model_args)\n",
"_____no_output_____"
],
[
"#yep!\nres[0]['test']['mean_accuracy']",
"_____no_output_____"
],
[
"#### Logistic Regression with Softmax loss ",
"_____no_output_____"
],
[
"def softmax_loss_l2reg(margins, C):\n \"\"\"this shows how to write softmax logistic regression \n using tensorflow\n \"\"\"\n num_classes = margins.shape.as_list()[1]\n category_labels = tf.placeholder(shape=(None, num_classes),\n dtype=tf.float32, \n name='labels')\n \n #get the softmax from the margins\n probs = tf.nn.softmax(margins) \n #extract just the prob value for the correct category\n #(we have the (cats + 1)/2 thing because the category_labels\n #come in as {-1, +1} values but we need {0,1} for this purpose)\n probs_cat_vec = probs * ((category_labels + 1.) / 2.)\n #sum up over categories (actually only one term, that for\n #the correct category, contributes on each row)\n probs_cat = tf.reduce_mean(probs_cat_vec, axis=1)\n #-log \n neglogprob = -tf.log(probs_cat)\n #average over the batch\n log_loss = tf.reduce_mean(neglogprob)\n \n weights = cls.model.op.inputs[0].op.inputs[1]\n reg_loss = 0.5*tf.reduce_mean(tf.square(weights)) \n \n total_loss = C * log_loss + reg_loss\n return total_loss, category_labels",
"_____no_output_____"
],
[
"model_args={'model_func': linear_classifier,\n 'model_kwargs': {},\n 'loss_func': softmax_loss_l2reg, \n 'loss_kwargs': {'C': 5e-3},\n 'batch_size': 2500,\n 'train_iterations': 1000,\n 'train_shuffle': True,\n 'optimizer_class':tf.train.MomentumOptimizer,\n 'optimizer_kwargs': {'learning_rate': 1.,\n 'momentum': 0.9\n },\n 'sess': sess}\n\nres = cv.train_and_test_scikit_classifier(features=Neural_Data,\n labels=categories,\n splits=splits,\n model_class=TF_OVA_Classifier,\n model_args=model_args)\n",
"_____no_output_____"
],
[
"res[0]['test']['mean_accuracy']\n#ok works reasonably well",
"_____no_output_____"
]
]
] |
[
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a64dc82c36465cacc694846338e946f8a4291d8
| 33,276 |
ipynb
|
Jupyter Notebook
|
First Neural Network with Keras API.ipynb
|
MuizAlvi/Machine_Learning_and_Deep_Learning_models
|
6a5d7b8904ed36d3f57dccb66a27d49661a4736a
|
[
"Unlicense"
] | 1 |
2021-07-03T07:00:23.000Z
|
2021-07-03T07:00:23.000Z
|
First Neural Network with Keras API.ipynb
|
MuizAlvi/Machine_Learning_and_Deep_Learning_models
|
6a5d7b8904ed36d3f57dccb66a27d49661a4736a
|
[
"Unlicense"
] | null | null | null |
First Neural Network with Keras API.ipynb
|
MuizAlvi/Machine_Learning_and_Deep_Learning_models
|
6a5d7b8904ed36d3f57dccb66a27d49661a4736a
|
[
"Unlicense"
] | 1 |
2021-05-09T22:05:45.000Z
|
2021-05-09T22:05:45.000Z
| 67.224242 | 17,060 | 0.770225 |
[
[
[
"## Problem Statement",
"_____no_output_____"
],
[
" An experimental drug was tested on 2100 individual in a clinical trial. The ages of participants ranged from thirteen to hundred. Half of the participants were under the age of 65 years old, the other half were 65 years or older.\n Ninety five percent patients that were 65 years or older experienced side effects. Ninety five percent patients under 65 years experienced no side effects.\n\n You have to build a program that takes the age of a participant as input and predicts whether this patient has suffered from a side effect or not.\n\n Steps:\n\n • Generate a random dataset that adheres to these statements\n • Divide the dataset into Training (90%) and Validation (10%) set\n • Build a Simple Sequential Model\n • Train and Validate the Model on the dataset\n • Randomly choose 20% data from dataset as Test set\n • Plot predictions made by the Model on Test set\n",
"_____no_output_____"
],
[
"## Generating Dataset",
"_____no_output_____"
]
],
[
[
"import numpy as np\nfrom random import randint\nfrom sklearn.utils import shuffle\nfrom sklearn.preprocessing import MinMaxScaler",
"_____no_output_____"
],
[
"train_labels = [] # one means side effect experienced, zero means no side effect experienced\ntrain_samples = []",
"_____no_output_____"
],
[
"for i in range(50):\n # The 5% of younger individuals who did experience side effects\n random_younger = randint(13, 64)\n train_samples.append(random_younger)\n train_labels.append(1)\n \n # The 5% of older individuals who did not experience side effects\n random_older = randint(65, 100)\n train_samples.append(random_older)\n train_labels.append(0)\n \n\nfor i in range(1000):\n # The 95% of younger individuals who did not experience side effects\n random_younger = randint(13, 64)\n train_samples.append(random_younger)\n train_labels.append(0)\n\n # The 95% of older individuals who did experience side effects\n random_older = randint(65, 100)\n train_samples.append(random_older)\n train_labels.append(1)",
"_____no_output_____"
],
[
"train_labels = np.array(train_labels)\ntrain_samples = np.array(train_samples)\ntrain_labels, train_samples = shuffle(train_labels, train_samples) # randomly shuffles each individual array, removing any order imposed on the data set during the creation process",
"_____no_output_____"
],
[
"scaler = MinMaxScaler(feature_range = (0, 1)) # specifying scale (range: 0 to 1)\nscaled_train_samples = scaler.fit_transform(train_samples.reshape(-1,1)) # transforms our data scale (range: 13 to 100) into the one specified above (range: 0 to 1), we use the reshape fucntion as fit_transform doesnot accept 1-D data by default hence we need to reshape accordingly here",
"_____no_output_____"
]
],
[
[
"## Building a Sequential Model",
"_____no_output_____"
]
],
[
[
"import tensorflow as tf\nfrom tensorflow import keras\nfrom tensorflow.keras.models import Sequential\nfrom tensorflow.keras.layers import Activation, Dense\nfrom tensorflow.keras.optimizers import Adam\nfrom tensorflow.keras.metrics import categorical_crossentropy",
"_____no_output_____"
],
[
"model = Sequential([\n Dense(units = 16, input_shape = (1,), activation = 'relu'), \n Dense(units = 32, activation = 'relu'), \n Dense(units = 2, activation = 'softmax')\n])",
"_____no_output_____"
],
[
"model.summary()",
"Model: \"sequential\"\n_________________________________________________________________\nLayer (type) Output Shape Param # \n=================================================================\ndense (Dense) (None, 16) 32 \n_________________________________________________________________\ndense_1 (Dense) (None, 32) 544 \n_________________________________________________________________\ndense_2 (Dense) (None, 2) 66 \n=================================================================\nTotal params: 642\nTrainable params: 642\nNon-trainable params: 0\n_________________________________________________________________\n"
]
],
[
[
"## Training the Model",
"_____no_output_____"
]
],
[
[
"model.compile(optimizer = Adam(learning_rate = 0.0001), loss = 'sparse_categorical_crossentropy', metrics = ['accuracy'])",
"_____no_output_____"
],
[
"model.fit(x = scaled_train_samples, y = train_labels, validation_split = 0.1, batch_size = 10, epochs = 30, shuffle = True, verbose = 2)",
"Train on 1890 samples, validate on 210 samples\nEpoch 1/30\n1890/1890 - 1s - loss: 0.6592 - accuracy: 0.5185 - val_loss: 0.6448 - val_accuracy: 0.6286\nEpoch 2/30\n1890/1890 - 0s - loss: 0.6384 - accuracy: 0.6386 - val_loss: 0.6158 - val_accuracy: 0.7143\nEpoch 3/30\n1890/1890 - 0s - loss: 0.6057 - accuracy: 0.7169 - val_loss: 0.5762 - val_accuracy: 0.7714\nEpoch 4/30\n1890/1890 - 1s - loss: 0.5608 - accuracy: 0.7926 - val_loss: 0.5317 - val_accuracy: 0.8143\nEpoch 5/30\n1890/1890 - 1s - loss: 0.5175 - accuracy: 0.8323 - val_loss: 0.4916 - val_accuracy: 0.8429\nEpoch 6/30\n1890/1890 - 1s - loss: 0.4809 - accuracy: 0.8487 - val_loss: 0.4575 - val_accuracy: 0.8714\nEpoch 7/30\n1890/1890 - 1s - loss: 0.4479 - accuracy: 0.8704 - val_loss: 0.4276 - val_accuracy: 0.8714\nEpoch 8/30\n1890/1890 - 1s - loss: 0.4188 - accuracy: 0.8751 - val_loss: 0.4009 - val_accuracy: 0.8762\nEpoch 9/30\n1890/1890 - 1s - loss: 0.3927 - accuracy: 0.8852 - val_loss: 0.3784 - val_accuracy: 0.8905\nEpoch 10/30\n1890/1890 - 1s - loss: 0.3705 - accuracy: 0.9005 - val_loss: 0.3587 - val_accuracy: 0.9000\nEpoch 11/30\n1890/1890 - 0s - loss: 0.3514 - accuracy: 0.9095 - val_loss: 0.3427 - val_accuracy: 0.9000\nEpoch 12/30\n1890/1890 - 1s - loss: 0.3353 - accuracy: 0.9079 - val_loss: 0.3287 - val_accuracy: 0.9143\nEpoch 13/30\n1890/1890 - 0s - loss: 0.3222 - accuracy: 0.9111 - val_loss: 0.3175 - val_accuracy: 0.9143\nEpoch 14/30\n1890/1890 - 1s - loss: 0.3110 - accuracy: 0.9164 - val_loss: 0.3085 - val_accuracy: 0.9143\nEpoch 15/30\n1890/1890 - 0s - loss: 0.3019 - accuracy: 0.9190 - val_loss: 0.3011 - val_accuracy: 0.9190\nEpoch 16/30\n1890/1890 - 0s - loss: 0.2944 - accuracy: 0.9217 - val_loss: 0.2949 - val_accuracy: 0.9190\nEpoch 17/30\n1890/1890 - 0s - loss: 0.2880 - accuracy: 0.9222 - val_loss: 0.2893 - val_accuracy: 0.9286\nEpoch 18/30\n1890/1890 - 0s - loss: 0.2827 - accuracy: 0.9265 - val_loss: 0.2848 - val_accuracy: 0.9286\nEpoch 19/30\n1890/1890 - 0s - loss: 0.2781 - accuracy: 0.9307 - val_loss: 0.2825 - val_accuracy: 0.9286\nEpoch 20/30\n1890/1890 - 0s - loss: 0.2746 - accuracy: 0.9291 - val_loss: 0.2793 - val_accuracy: 0.9286\nEpoch 21/30\n1890/1890 - 0s - loss: 0.2714 - accuracy: 0.9270 - val_loss: 0.2760 - val_accuracy: 0.9333\nEpoch 22/30\n1890/1890 - 0s - loss: 0.2688 - accuracy: 0.9296 - val_loss: 0.2740 - val_accuracy: 0.9333\nEpoch 23/30\n1890/1890 - 0s - loss: 0.2663 - accuracy: 0.9344 - val_loss: 0.2731 - val_accuracy: 0.9286\nEpoch 24/30\n1890/1890 - 0s - loss: 0.2645 - accuracy: 0.9302 - val_loss: 0.2707 - val_accuracy: 0.9333\nEpoch 25/30\n1890/1890 - 1s - loss: 0.2627 - accuracy: 0.9302 - val_loss: 0.2689 - val_accuracy: 0.9333\nEpoch 26/30\n1890/1890 - 0s - loss: 0.2612 - accuracy: 0.9354 - val_loss: 0.2682 - val_accuracy: 0.9286\nEpoch 27/30\n1890/1890 - 0s - loss: 0.2597 - accuracy: 0.9333 - val_loss: 0.2667 - val_accuracy: 0.9333\nEpoch 28/30\n1890/1890 - 0s - loss: 0.2585 - accuracy: 0.9328 - val_loss: 0.2653 - val_accuracy: 0.9333\nEpoch 29/30\n1890/1890 - 0s - loss: 0.2572 - accuracy: 0.9365 - val_loss: 0.2640 - val_accuracy: 0.9333\nEpoch 30/30\n1890/1890 - 0s - loss: 0.2562 - accuracy: 0.9370 - val_loss: 0.2630 - val_accuracy: 0.9333\n"
]
],
[
[
"## Preprocessing Test Data",
"_____no_output_____"
]
],
[
[
"test_labels = []\ntest_samples = []",
"_____no_output_____"
],
[
"for i in range(10):\n # The 5% of younger individuals who did experience side effects\n random_younger = randint(13, 64)\n test_samples.append(random_younger)\n test_labels.append(1)\n \n # The 5% of older individuals who did not experience side effects\n random_older = randint(65, 100)\n test_samples.append(random_older)\n test_labels.append(0)\n \n\nfor i in range(200):\n # The 95% of younger individuals who did not experience side effects\n random_younger = randint(13, 64)\n test_samples.append(random_younger)\n test_labels.append(0)\n\n # The 95% of older individuals who did experience side effects\n random_older = randint(65, 100)\n test_samples.append(random_older)\n test_labels.append(1)",
"_____no_output_____"
],
[
"test_labels = np.array(test_labels)\ntest_samples = np.array(test_samples)\ntest_labels, test_samples = shuffle(test_labels, test_samples)",
"_____no_output_____"
],
[
"scaled_test_samples = scaler.fit_transform(test_samples.reshape(-1,1))",
"_____no_output_____"
]
],
[
[
"## Testing the Model using Predictions ",
"_____no_output_____"
]
],
[
[
"predictions = model.predict(x = scaled_test_samples, batch_size = 10, verbose = 0)",
"_____no_output_____"
],
[
"rounded_predictions = np.argmax(predictions, axis = -1)",
"_____no_output_____"
]
],
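As a quick, hypothetical sanity check that is not part of the original notebook, the raw softmax outputs in `predictions` can be printed next to the argmax labels in `rounded_predictions` from the two cells above:

```python
# Compare a few raw class-probability pairs with their rounded labels.
for probs, label in zip(predictions[:5], rounded_predictions[:5]):
    print(probs, "->", label)
```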
[
[
"## Preparing Confusion Matrix",
"_____no_output_____"
]
],
[
[
"from sklearn.metrics import confusion_matrix\nimport itertools\nimport matplotlib.pyplot as plt",
"_____no_output_____"
],
[
"cm = confusion_matrix(y_true = test_labels, y_pred = rounded_predictions)",
"_____no_output_____"
],
[
"# This function has been taken from the website of scikit Learn. link: https://scikit-learn.org/0.18/auto_examples/model_selection/plot_confusion_matrix.html\ndef plot_confusion_matrix(cm, classes,\n normalize=False,\n title='Confusion matrix',\n cmap=plt.cm.Blues):\n \"\"\"\n This function prints and plots the confusion matrix.\n Normalization can be applied by setting `normalize=True`.\n \"\"\"\n plt.imshow(cm, interpolation='nearest', cmap=cmap)\n plt.title(title)\n plt.colorbar()\n tick_marks = np.arange(len(classes))\n plt.xticks(tick_marks, classes, rotation=45)\n plt.yticks(tick_marks, classes)\n\n if normalize:\n cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n print(\"Normalized confusion matrix\")\n else:\n print('Confusion matrix, without normalization')\n\n print(cm)\n\n thresh = cm.max() / 2.\n for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):\n plt.text(j, i, cm[i, j],\n horizontalalignment=\"center\",\n color=\"white\" if cm[i, j] > thresh else \"black\")\n\n plt.tight_layout()\n plt.ylabel('True label')\n plt.xlabel('Predicted label')",
"_____no_output_____"
]
],
[
[
"## Plotting Predictions using Confusion Matrix",
"_____no_output_____"
]
],
[
[
"cm_plot_labels = ['no_side_effects', 'had_side_effects']\nplot_confusion_matrix(cm = cm, classes = cm_plot_labels, title = 'Confusion Matrix')",
"Confusion matrix, without normalization\n[[193 17]\n [ 10 200]]\n"
]
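A brief, hedged follow-up to the plot: the headline metrics below are computed directly from the `cm` matrix printed above, treating `had_side_effects` (label 1) as the positive class.

```python
# With label order [no_side_effects, had_side_effects], ravel() of the
# 2x2 confusion matrix returns tn, fp, fn, tp.
tn, fp, fn, tp = cm.ravel()
accuracy = (tp + tn) / cm.sum()   # (193 + 200) / 420
sensitivity = tp / (tp + fn)      # recall for 'had_side_effects'
specificity = tn / (tn + fp)      # recall for 'no_side_effects'
print(f"accuracy={accuracy:.3f}, sensitivity={sensitivity:.3f}, specificity={specificity:.3f}")
```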
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a64de138f883d62d6fbb05a929a834d17532232
| 14,556 |
ipynb
|
Jupyter Notebook
|
examples/notebooks/mixture_of_gaussians.ipynb
|
sylvchev/pymanopt
|
d16b088df50d695f84f2d75bbb821d10d9b85d33
|
[
"BSD-3-Clause"
] | 459 |
2016-02-18T18:53:30.000Z
|
2022-03-30T07:28:53.000Z
|
examples/notebooks/mixture_of_gaussians.ipynb
|
sylvchev/pymanopt
|
d16b088df50d695f84f2d75bbb821d10d9b85d33
|
[
"BSD-3-Clause"
] | 131 |
2016-02-19T16:17:44.000Z
|
2022-03-21T09:27:11.000Z
|
examples/notebooks/mixture_of_gaussians.ipynb
|
sylvchev/pymanopt
|
d16b088df50d695f84f2d75bbb821d10d9b85d33
|
[
"BSD-3-Clause"
] | 127 |
2016-03-11T18:47:15.000Z
|
2022-03-11T06:07:37.000Z
| 39.554348 | 648 | 0.544724 |
[
[
[
"# Riemannian Optimisation with Pymanopt for Inference in MoG models",
"_____no_output_____"
],
[
"The Mixture of Gaussians (MoG) model assumes that datapoints $\\mathbf{x}_i\\in\\mathbb{R}^d$ follow a distribution described by the following probability density function:\n\n$p(\\mathbf{x}) = \\sum_{m=1}^M \\pi_m p_\\mathcal{N}(\\mathbf{x};\\mathbf{\\mu}_m,\\mathbf{\\Sigma}_m)$ where $\\pi_m$ is the probability that the data point belongs to the $m^\\text{th}$ mixture component and $p_\\mathcal{N}(\\mathbf{x};\\mathbf{\\mu}_m,\\mathbf{\\Sigma}_m)$ is the probability density function of a multivariate Gaussian distribution with mean $\\mathbf{\\mu}_m \\in \\mathbb{R}^d$ and psd covariance matrix $\\mathbf{\\Sigma}_m \\in \\{\\mathbf{M}\\in\\mathbb{R}^{d\\times d}: \\mathbf{M}\\succeq 0\\}$.\n\nAs an example consider the mixture of three Gaussians with means\n$\\mathbf{\\mu}_1 = \\begin{bmatrix} -4 \\\\ 1 \\end{bmatrix}$,\n$\\mathbf{\\mu}_2 = \\begin{bmatrix} 0 \\\\ 0 \\end{bmatrix}$ and\n$\\mathbf{\\mu}_3 = \\begin{bmatrix} 2 \\\\ -1 \\end{bmatrix}$, covariances\n$\\mathbf{\\Sigma}_1 = \\begin{bmatrix} 3 & 0 \\\\ 0 & 1 \\end{bmatrix}$,\n$\\mathbf{\\Sigma}_2 = \\begin{bmatrix} 1 & 1 \\\\ 1 & 3 \\end{bmatrix}$ and\n$\\mathbf{\\Sigma}_3 = \\begin{bmatrix} 0.5 & 0 \\\\ 0 & 0.5 \\end{bmatrix}$\nand mixture probability vector $\\boldsymbol{\\pi}=\\left[0.1, 0.6, 0.3\\right]^\\top$.\nLet's generate $N=1000$ samples of that MoG model and scatter plot the samples:",
"_____no_output_____"
]
],
[
[
"import autograd.numpy as np\nnp.set_printoptions(precision=2)\nimport matplotlib.pyplot as plt\n%matplotlib inline\n\n# Number of data points\nN = 1000\n\n# Dimension of each data point\nD = 2\n\n# Number of clusters\nK = 3\n\npi = [0.1, 0.6, 0.3]\nmu = [np.array([-4, 1]), np.array([0, 0]), np.array([2, -1])]\nSigma = [np.array([[3, 0],[0, 1]]), np.array([[1, 1.], [1, 3]]), 0.5 * np.eye(2)]\n\ncomponents = np.random.choice(K, size=N, p=pi)\nsamples = np.zeros((N, D))\n# For each component, generate all needed samples\nfor k in range(K):\n # indices of current component in X\n indices = k == components\n # number of those occurrences\n n_k = indices.sum()\n if n_k > 0:\n samples[indices, :] = np.random.multivariate_normal(mu[k], Sigma[k], n_k)\n\ncolors = ['r', 'g', 'b', 'c', 'm']\nfor k in range(K):\n indices = k == components\n plt.scatter(samples[indices, 0], samples[indices, 1], alpha=0.4, color=colors[k%K])\nplt.axis('equal')\nplt.show()",
"_____no_output_____"
]
],
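As an aside not present in the original notebook, the density defined above can be evaluated directly; this sketch assumes SciPy is available and reuses the `pi`, `mu`, `Sigma` and `samples` objects from the cell above.

```python
# Evaluate p(x) = sum_m pi_m * N(x; mu_m, Sigma_m) at the first sample.
from scipy.stats import multivariate_normal

x = samples[0]
p_x = sum(w * multivariate_normal(mean=m, cov=S).pdf(x)
          for w, m, S in zip(pi, mu, Sigma))
print("MoG density at the first sample:", p_x)
```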
[
[
"Given a data sample the de facto standard method to infer the parameters is the [expectation maximisation](https://en.wikipedia.org/wiki/Expectation-maximization_algorithm) (EM) algorithm that, in alternating so-called E and M steps, maximises the log-likelihood of the data.\nIn [arXiv:1506.07677](http://arxiv.org/pdf/1506.07677v1.pdf) Hosseini and Sra propose Riemannian optimisation as a powerful counterpart to EM. Importantly, they introduce a reparameterisation that leaves local optima of the log-likelihood unchanged while resulting in a geodesically convex optimisation problem over a product manifold $\\prod_{m=1}^M\\mathcal{PD}^{(d+1)\\times(d+1)}$ of manifolds of $(d+1)\\times(d+1)$ symmetric positive definite matrices.\nThe proposed method is on par with EM and shows less variability in running times.\n\nThe reparameterised optimisation problem for augmented data points $\\mathbf{y}_i=[\\mathbf{x}_i^\\top, 1]^\\top$ can be stated as follows:\n\n$$\\min_{(\\mathbf{S}_1, ..., \\mathbf{S}_m, \\boldsymbol{\\nu}) \\in \\mathcal{D}}\n-\\sum_{n=1}^N\\log\\left(\n\\sum_{m=1}^M \\frac{\\exp(\\nu_m)}{\\sum_{k=1}^M\\exp(\\nu_k)}\nq_\\mathcal{N}(\\mathbf{y}_n;\\mathbf{S}_m)\n\\right)$$\n\nwhere\n\n* $\\mathcal{D} := \\left(\\prod_{m=1}^M \\mathcal{PD}^{(d+1)\\times(d+1)}\\right)\\times\\mathbb{R}^{M-1}$ is the search space\n* $\\mathcal{PD}^{(d+1)\\times(d+1)}$ is the manifold of symmetric positive definite\n$(d+1)\\times(d+1)$ matrices\n* $\\mathcal{\\nu}_m = \\log\\left(\\frac{\\alpha_m}{\\alpha_M}\\right), \\ m=1, ..., M-1$ and $\\nu_M=0$\n* $q_\\mathcal{N}(\\mathbf{y}_n;\\mathbf{S}_m) =\n2\\pi\\exp\\left(\\frac{1}{2}\\right)\n|\\operatorname{det}(\\mathbf{S}_m)|^{-\\frac{1}{2}}(2\\pi)^{-\\frac{d+1}{2}}\n\\exp\\left(-\\frac{1}{2}\\mathbf{y}_i^\\top\\mathbf{S}_m^{-1}\\mathbf{y}_i\\right)$\n\n**Optimisation problems like this can easily be solved using Pymanopt – even without the need to differentiate the cost function manually!**\n\nSo let's infer the parameters of our toy example by Riemannian optimisation using Pymanopt:",
"_____no_output_____"
]
],
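Before running the optimisation it can help to see the reparameterisation concretely. The construction below is an assumption on my part, chosen to be consistent with the extraction code used later in this notebook (mean in the last column, covariance as the top-left block minus the outer product of the mean): S = [[Sigma + mu mu^T, mu], [mu^T, 1]].

```python
# Sketch: build the augmented matrix S for component 1 and check that
# the slicing used later in this notebook recovers (mu, Sigma).
import numpy

m = numpy.array([[-4.0], [1.0]])            # mu_1 as a column vector
C = numpy.array([[3.0, 0.0], [0.0, 1.0]])   # Sigma_1
S = numpy.block([[C + m @ m.T, m], [m.T, numpy.ones((1, 1))]])

mu_rec = S[0:2, 2:3]                         # same slicing as used below
Sigma_rec = S[:2, :2] - mu_rec @ mu_rec.T
print(numpy.allclose(mu_rec, m), numpy.allclose(Sigma_rec, C))
```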
[
[
"import sys\nsys.path.insert(0,\"../..\")\n\nimport autograd.numpy as np\nfrom autograd.scipy.special import logsumexp\n\nimport pymanopt\nfrom pymanopt.manifolds import Product, Euclidean, SymmetricPositiveDefinite\nfrom pymanopt import Problem\nfrom pymanopt.solvers import SteepestDescent\n\n# (1) Instantiate the manifold\nmanifold = Product([SymmetricPositiveDefinite(D+1, k=K), Euclidean(K-1)])\n\n# (2) Define cost function\n# The parameters must be contained in a list theta.\[email protected]\ndef cost(S, v):\n # Unpack parameters\n nu = np.append(v, 0)\n \n logdetS = np.expand_dims(np.linalg.slogdet(S)[1], 1)\n y = np.concatenate([samples.T, np.ones((1, N))], axis=0)\n\n # Calculate log_q\n y = np.expand_dims(y, 0)\n \n # 'Probability' of y belonging to each cluster\n log_q = -0.5 * (np.sum(y * np.linalg.solve(S, y), axis=1) + logdetS)\n\n alpha = np.exp(nu)\n alpha = alpha / np.sum(alpha)\n alpha = np.expand_dims(alpha, 1)\n \n loglikvec = logsumexp(np.log(alpha) + log_q, axis=0)\n return -np.sum(loglikvec)\n\nproblem = Problem(manifold=manifold, cost=cost, verbosity=1)\n\n# (3) Instantiate a Pymanopt solver\nsolver = SteepestDescent()\n\n# let Pymanopt do the rest\nXopt = solver.solve(problem)",
"_____no_output_____"
]
],
[
[
"Once Pymanopt has finished the optimisation we can obtain the inferred parameters as follows:",
"_____no_output_____"
]
],
[
[
"mu1hat = Xopt[0][0][0:2,2:3]\nSigma1hat = Xopt[0][0][:2, :2] - mu1hat.dot(mu1hat.T)\nmu2hat = Xopt[0][1][0:2,2:3]\nSigma2hat = Xopt[0][1][:2, :2] - mu2hat.dot(mu2hat.T)\nmu3hat = Xopt[0][2][0:2,2:3]\nSigma3hat = Xopt[0][2][:2, :2] - mu3hat.dot(mu3hat.T)\npihat = np.exp(np.concatenate([Xopt[1], [0]], axis=0))\npihat = pihat / np.sum(pihat)",
"_____no_output_____"
]
],
[
[
"And convince ourselves that the inferred parameters are close to the ground truth parameters.\n\nThe ground truth parameters $\\mathbf{\\mu}_1, \\mathbf{\\Sigma}_1, \\mathbf{\\mu}_2, \\mathbf{\\Sigma}_2, \\mathbf{\\mu}_3, \\mathbf{\\Sigma}_3, \\pi_1, \\pi_2, \\pi_3$:",
"_____no_output_____"
]
],
[
[
"print(mu[0])\nprint(Sigma[0])\nprint(mu[1])\nprint(Sigma[1])\nprint(mu[2])\nprint(Sigma[2])\nprint(pi[0])\nprint(pi[1])\nprint(pi[2])",
"_____no_output_____"
]
],
[
[
"And the inferred parameters $\\hat{\\mathbf{\\mu}}_1, \\hat{\\mathbf{\\Sigma}}_1, \\hat{\\mathbf{\\mu}}_2, \\hat{\\mathbf{\\Sigma}}_2, \\hat{\\mathbf{\\mu}}_3, \\hat{\\mathbf{\\Sigma}}_3, \\hat{\\pi}_1, \\hat{\\pi}_2, \\hat{\\pi}_3$:",
"_____no_output_____"
]
],
[
[
"print(mu1hat)\nprint(Sigma1hat)\nprint(mu2hat)\nprint(Sigma2hat)\nprint(mu3hat)\nprint(Sigma3hat)\nprint(pihat[0])\nprint(pihat[1])\nprint(pihat[2])",
"_____no_output_____"
]
],
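Because mixture components come back in an arbitrary order, a direct element-wise comparison can be misleading. The sketch below is an optional addition (not in the original notebook) that matches each estimated mean to its nearest ground-truth mean; it assumes `mu` and the `mu1hat`/`mu2hat`/`mu3hat` variables from the cells above and that SciPy is available.

```python
import numpy
from scipy.optimize import linear_sum_assignment

mu_hat = [mu1hat.ravel(), mu2hat.ravel(), mu3hat.ravel()]
cost = numpy.array([[numpy.linalg.norm(mh - mt) for mt in mu] for mh in mu_hat])
rows, cols = linear_sum_assignment(cost)
for r, c in zip(rows, cols):
    print(f"estimated component {r} ~ true component {c}, mean error {cost[r, c]:.3f}")
```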
[
[
"Et voilà – this was a brief demonstration of how to do inference for MoG models by performing Manifold optimisation using Pymanopt.",
"_____no_output_____"
],
[
"## When Things Go Astray\n\nA well-known problem when fitting parameters of a MoG model is that one Gaussian may collapse onto a single data point resulting in singular covariance matrices (cf. e.g. p. 434 in Bishop, C. M. \"Pattern Recognition and Machine Learning.\" 2001). This problem can be avoided by the following heuristic: if a component's covariance matrix is close to being singular we reset its mean and covariance matrix. Using Pymanopt this can be accomplished by using an appropriate line search rule (based on [LineSearchBackTracking](https://github.com/pymanopt/pymanopt/blob/master/pymanopt/solvers/linesearch.py)) -- here we demonstrate this approach:",
"_____no_output_____"
]
],
[
[
"class LineSearchMoG:\n \"\"\"\n Back-tracking line-search that checks for close to singular matrices.\n \"\"\"\n\n def __init__(self, contraction_factor=.5, optimism=2,\n suff_decr=1e-4, maxiter=25, initial_stepsize=1):\n self.contraction_factor = contraction_factor\n self.optimism = optimism\n self.suff_decr = suff_decr\n self.maxiter = maxiter\n self.initial_stepsize = initial_stepsize\n\n self._oldf0 = None\n\n def search(self, objective, manifold, x, d, f0, df0):\n \"\"\"\n Function to perform backtracking line-search.\n Arguments:\n - objective\n objective function to optimise\n - manifold\n manifold to optimise over\n - x\n starting point on the manifold\n - d\n tangent vector at x (descent direction)\n - df0\n directional derivative at x along d\n Returns:\n - stepsize\n norm of the vector retracted to reach newx from x\n - newx\n next iterate suggested by the line-search\n \"\"\"\n # Compute the norm of the search direction\n norm_d = manifold.norm(x, d)\n\n if self._oldf0 is not None:\n # Pick initial step size based on where we were last time.\n alpha = 2 * (f0 - self._oldf0) / df0\n # Look a little further\n alpha *= self.optimism\n else:\n alpha = self.initial_stepsize / norm_d\n alpha = float(alpha)\n\n # Make the chosen step and compute the cost there.\n newx, newf, reset = self._newxnewf(x, alpha * d, objective, manifold)\n step_count = 1\n \n # Backtrack while the Armijo criterion is not satisfied\n while (newf > f0 + self.suff_decr * alpha * df0 and\n step_count <= self.maxiter and\n not reset):\n\n # Reduce the step size\n alpha = self.contraction_factor * alpha\n\n # and look closer down the line\n newx, newf, reset = self._newxnewf(x, alpha * d, objective, manifold)\n\n step_count = step_count + 1\n\n # If we got here without obtaining a decrease, we reject the step.\n if newf > f0 and not reset:\n alpha = 0\n newx = x\n\n stepsize = alpha * norm_d\n\n self._oldf0 = f0\n\n return stepsize, newx\n \n def _newxnewf(self, x, d, objective, manifold):\n newx = manifold.retr(x, d)\n try:\n newf = objective(newx)\n except np.linalg.LinAlgError:\n replace = np.asarray([np.linalg.matrix_rank(newx[0][k, :, :]) != newx[0][0, :, :].shape[0]\n for k in range(newx[0].shape[0])])\n x[0][replace, :, :] = manifold.rand()[0][replace, :, :]\n return x, objective(x), True\n return newx, newf, False",
"_____no_output_____"
]
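A usage sketch for the class above: in the pymanopt 0.2.x-style API targeted by this notebook, `SteepestDescent` can be constructed with a `linesearch` argument, so the custom rule replaces the default back-tracking search. Treat the exact keyword as an assumption; it may differ in other pymanopt versions.

```python
# Plug the singularity-aware line search into the solver and re-solve
# the same `problem` defined earlier in the notebook.
solver = SteepestDescent(linesearch=LineSearchMoG())
Xopt2 = solver.solve(problem)
```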
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
]
] |
4a650d485d9d5ecbfa3665a4f02631152f3f2283
| 327,555 |
ipynb
|
Jupyter Notebook
|
d2l/chapter_computer-vision/neural-style.ipynb
|
atlasbioinfo/myDLNotes_Pytorch
|
fada6ab56af340cd5ec6cc4dfd5e749af16a6ed4
|
[
"MIT"
] | null | null | null |
d2l/chapter_computer-vision/neural-style.ipynb
|
atlasbioinfo/myDLNotes_Pytorch
|
fada6ab56af340cd5ec6cc4dfd5e749af16a6ed4
|
[
"MIT"
] | null | null | null |
d2l/chapter_computer-vision/neural-style.ipynb
|
atlasbioinfo/myDLNotes_Pytorch
|
fada6ab56af340cd5ec6cc4dfd5e749af16a6ed4
|
[
"MIT"
] | null | null | null | 128.352273 | 98,305 | 0.825483 |
[
[
[
"# 样式迁移\n\n如果你是一位摄影爱好者,你也许接触过滤镜。它能改变照片的颜色样式,从而使风景照更加锐利或者令人像更加美白。但一个滤镜通常只能改变照片的某个方面。如果要照片达到理想中的样式,你可能需要尝试大量不同的组合。这个过程的复杂程度不亚于模型调参。 \n\n在本节中,我们将介绍如何使用卷积神经网络,自动将一个图像中的样式应用在另一图像之上,即*样式迁移*(style transfer) :cite:`Gatys.Ecker.Bethge.2016`。\n这里我们需要两张输入图像:一张是*内容图像*,另一张是*样式图像*。\n我们将使用神经网络修改内容图像,使其在样式上接近样式图像。\n例如, :numref:`fig_style_transfer` 中的内容图像为本书作者在西雅图郊区的雷尼尔山国家公园拍摄的风景照,而样式图像则是一幅主题为秋天橡树的油画。\n最终输出的合成图像应用了样式图像的油画笔触让整体颜色更加鲜艳,同时保留了内容图像中物体主体的形状。\n\n\n:label:`fig_style_transfer`\n\n## 方法\n\n:numref:`fig_style_transfer_model` 用简单的例子阐述了基于卷积神经网络的样式迁移方法。\n首先,我们初始化合成图像,例如将其初始化为内容图像。\n该合成图像是样式迁移过程中唯一需要更新的变量,即样式迁移所需迭代的模型参数。\n然后,我们选择一个预训练的卷积神经网络来抽取图像的特征,其中的模型参数在训练中无须更新。\n这个深度卷积神经网络凭借多个层逐级抽取图像的特征,我们可以选择其中某些层的输出作为内容特征或样式特征。\n以 :numref:`fig_style_transfer_model` 为例,这里选取的预训练的神经网络含有3个卷积层,其中第二层输出内容特征,第一层和第三层输出样式特征。 \n\n\n:label:`fig_style_transfer_model`\n\n接下来,我们通过正向传播(实线箭头方向)计算样式迁移的损失函数,并通过反向传播(虚线箭头方向)迭代模型参数,即不断更新合成图像。\n样式迁移常用的损失函数由3部分组成:\n(i) *内容损失*使合成图像与内容图像在内容特征上接近;\n(ii) *样式损失*使合成图像与样式图像在样式特征上接近;\n(iii) *总变差损失*则有助于减少合成图像中的噪点。\n最后,当模型训练结束时,我们输出样式迁移的模型参数,即得到最终的合成图像。 \n\n在下面,我们将通过代码来进一步了解样式迁移的技术细节。\n\n## [**阅读内容和样式图像**]\n\n首先,我们读取内容和样式图像。\n从打印出的图像坐标轴可以看出,它们的尺寸并不一样。\n",
"_____no_output_____"
]
],
[
[
"%matplotlib inline\nimport torch\nimport torchvision\nfrom torch import nn\nfrom d2l import torch as d2l\n\nd2l.set_figsize()\ncontent_img = d2l.Image.open('../img/rainier.jpg')\nd2l.plt.imshow(content_img);",
"_____no_output_____"
],
[
"style_img = d2l.Image.open('../img/autumn-oak.jpg')\nd2l.plt.imshow(style_img);",
"_____no_output_____"
]
],
[
[
"## [**预处理和后处理**]\n\n下面,定义图像的预处理函数和后处理函数。\n预处理函数`preprocess`对输入图像在RGB三个通道分别做标准化,并将结果变换成卷积神经网络接受的输入格式。\n后处理函数`postprocess`则将输出图像中的像素值还原回标准化之前的值。\n由于图像打印函数要求每个像素的浮点数值在0到1之间,我们对小于0和大于1的值分别取0和1。\n",
"_____no_output_____"
]
],
[
[
"rgb_mean = torch.tensor([0.485, 0.456, 0.406])\nrgb_std = torch.tensor([0.229, 0.224, 0.225])\n\ndef preprocess(img, image_shape):\n transforms = torchvision.transforms.Compose([\n torchvision.transforms.Resize(image_shape),\n torchvision.transforms.ToTensor(),\n torchvision.transforms.Normalize(mean=rgb_mean, std=rgb_std)])\n return transforms(img).unsqueeze(0)\n\ndef postprocess(img):\n img = img[0].to(rgb_std.device)\n img = torch.clamp(img.permute(1, 2, 0) * rgb_std + rgb_mean, 0, 1)\n return torchvision.transforms.ToPILImage()(img.permute(2, 0, 1))",
"_____no_output_____"
]
],
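A quick check that is not part of the original chapter: feeding an image through `preprocess` and then `postprocess` should return a visually similar (resized) picture, which confirms the normalization is undone correctly. It assumes `content_img` and the two functions defined above.

```python
# Round-trip: normalize, then undo the normalization and display.
restored = postprocess(preprocess(content_img, (300, 450)))
d2l.plt.imshow(restored);
```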
[
[
"## [**抽取图像特征**]\n\n我们使用基于ImageNet数据集预训练的VGG-19模型来抽取图像特征 :cite:`Gatys.Ecker.Bethge.2016`。\n",
"_____no_output_____"
]
],
[
[
"pretrained_net = torchvision.models.vgg19(pretrained=True)",
"_____no_output_____"
]
],
[
[
"为了抽取图像的内容特征和样式特征,我们可以选择VGG网络中某些层的输出。\n一般来说,越靠近输入层,越容易抽取图像的细节信息;反之,则越容易抽取图像的全局信息。\n为了避免合成图像过多保留内容图像的细节,我们选择VGG较靠近输出的层,即*内容层*,来输出图像的内容特征。\n我们还从VGG中选择不同层的输出来匹配局部和全局的样式,这些图层也称为*样式层*。\n正如 :numref:`sec_vgg` 中所介绍的,VGG网络使用了5个卷积块。\n实验中,我们选择第四卷积块的最后一个卷积层作为内容层,选择每个卷积块的第一个卷积层作为样式层。\n这些层的索引可以通过打印`pretrained_net`实例获取。\n",
"_____no_output_____"
]
],
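The layer indices mentioned above can be listed explicitly; this small sketch assumes `pretrained_net` has been loaded as in the earlier cell.

```python
# Print the index and type of every layer in VGG-19's feature extractor;
# these indices are what the content/style layer choices below refer to.
for i, layer in enumerate(pretrained_net.features):
    print(i, layer)
```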
[
[
"style_layers, content_layers = [0, 5, 10, 19, 28], [25]",
"_____no_output_____"
]
],
[
[
"使用VGG层抽取特征时,我们只需要用到从输入层到最靠近输出层的内容层或样式层之间的所有层。\n下面构建一个新的网络`net`,它只保留需要用到的VGG的所有层。\n",
"_____no_output_____"
]
],
[
[
"net = nn.Sequential(*[pretrained_net.features[i] for i in\n range(max(content_layers + style_layers) + 1)])",
"_____no_output_____"
]
],
[
[
"给定输入`X`,如果我们简单地调用前向计算`net(X)`,只能获得最后一层的输出。\n由于我们还需要中间层的输出,因此这里我们逐层计算,并保留内容层和样式层的输出。\n",
"_____no_output_____"
]
],
[
[
"def extract_features(X, content_layers, style_layers):\n contents = []\n styles = []\n for i in range(len(net)):\n X = net[i](X)\n if i in style_layers:\n styles.append(X)\n if i in content_layers:\n contents.append(X)\n return contents, styles",
"_____no_output_____"
]
],
[
[
"下面定义两个函数:`get_contents`函数对内容图像抽取内容特征;\n`get_styles`函数对样式图像抽取样式特征。\n因为在训练时无须改变预训练的VGG的模型参数,所以我们可以在训练开始之前就提取出内容特征和样式特征。\n由于合成图像是样式迁移所需迭代的模型参数,我们只能在训练过程中通过调用`extract_features`函数来抽取合成图像的内容特征和样式特征。\n",
"_____no_output_____"
]
],
[
[
"def get_contents(image_shape, device):\n content_X = preprocess(content_img, image_shape).to(device)\n contents_Y, _ = extract_features(content_X, content_layers, style_layers)\n return content_X, contents_Y\n\ndef get_styles(image_shape, device):\n style_X = preprocess(style_img, image_shape).to(device)\n _, styles_Y = extract_features(style_X, content_layers, style_layers)\n return style_X, styles_Y",
"_____no_output_____"
]
],
[
[
"## [**定义损失函数**]\n\n下面我们来描述样式迁移的损失函数。\n它由内容损失、样式损失和总变差损失3部分组成。 \n\n### 内容损失\n\n与线性回归中的损失函数类似,内容损失通过平方误差函数衡量合成图像与内容图像在内容特征上的差异。\n平方误差函数的两个输入均为`extract_features`函数计算所得到的内容层的输出。\n",
"_____no_output_____"
]
],
[
[
"def content_loss(Y_hat, Y):\n # 我们从动态计算梯度的树中分离目标:\n # 这是一个规定的值,而不是一个变量。\n return torch.square(Y_hat - Y.detach()).mean()",
"_____no_output_____"
]
],
[
[
"### 样式损失\n\n样式损失与内容损失类似,也通过平方误差函数衡量合成图像与样式图像在样式上的差异。\n为了表达样式层输出的样式,我们先通过`extract_features`函数计算样式层的输出。\n假设该输出的样本数为1,通道数为$c$,高和宽分别为$h$和$w$,我们可以将此输出转换为矩阵 $\\mathbf{X}$,其有$c$行和$hw$列。\n这个矩阵可以被看作是由$c$个长度为$hw$的向量$\\mathbf{x}_1, \\ldots, \\mathbf{x}_c$组合而成的。其中向量$\\mathbf{x}_i$代表了通道 $i$ 上的样式特征。 \n\n在这些向量的*格拉姆矩阵* $\\mathbf{X}\\mathbf{X}^\\top \\in \\mathbb{R}^{c \\times c}$ 中,$i$ 行 $j$ 列的元素 $x_{ij}$ 即向量 $\\mathbf{x}_i$ 和 $\\mathbf{x}_j$ 的内积。它表达了通道 $i$ 和通道 $j$ 上样式特征的相关性。我们用这样的格拉姆矩阵来表达样式层输出的样式。\n需要注意的是,当$hw$的值较大时,格拉姆矩阵中的元素容易出现较大的值。\n此外,格拉姆矩阵的高和宽皆为通道数$c$。\n为了让样式损失不受这些值的大小影响,下面定义的`gram`函数将格拉姆矩阵除以了矩阵中元素的个数,即 $chw$ 。\n",
"_____no_output_____"
]
],
[
[
"def gram(X):\n num_channels, n = X.shape[1], X.numel() // X.shape[1]\n X = X.reshape((num_channels, n))\n return torch.matmul(X, X.T) / (num_channels * n)",
"_____no_output_____"
]
],
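A small verification of `gram`, added here as an aside and assuming the function and the `torch` import from the cells above: the result should be a symmetric c x c matrix.

```python
X = torch.randn(1, 3, 4, 5)      # 1 example, 3 channels, 4x5 feature map
G = gram(X)
print(G.shape)                    # torch.Size([3, 3])
print(torch.allclose(G, G.T))     # True: Gram matrices are symmetric
```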
[
[
"自然地,样式损失的平方误差函数的两个格拉姆矩阵输入分别基于合成图像与样式图像的样式层输出。这里假设基于样式图像的格拉姆矩阵`gram_Y`已经预先计算好了。\n",
"_____no_output_____"
]
],
[
[
"def style_loss(Y_hat, gram_Y):\n return torch.square(gram(Y_hat) - gram_Y.detach()).mean()",
"_____no_output_____"
]
],
[
[
"### 总变差损失\n\n有时候,我们学到的合成图像里面有大量高频噪点,即有特别亮或者特别暗的颗粒像素。\n一种常见的降噪方法是*总变差降噪*:\n假设 $x_{i, j}$ 表示坐标 $(i, j)$ 处的像素值,降低总变差损失 \n\n$$\\sum_{i, j} \\left|x_{i, j} - x_{i+1, j}\\right| + \\left|x_{i, j} - x_{i, j+1}\\right|$$\n\n能够尽可能使邻近的像素值相似。\n",
"_____no_output_____"
]
],
[
[
"def tv_loss(Y_hat):\n return 0.5 * (torch.abs(Y_hat[:, :, 1:, :] - Y_hat[:, :, :-1, :]).mean() +\n torch.abs(Y_hat[:, :, :, 1:] - Y_hat[:, :, :, :-1]).mean())",
"_____no_output_____"
]
],
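As a quick illustration that is not in the original text, `tv_loss` is zero for a constant image and positive for a noisy one; this assumes the function above and the `torch` import from earlier.

```python
flat = torch.ones(1, 3, 8, 8)     # constant image -> no total variation
noisy = torch.rand(1, 3, 8, 8)    # random image -> positive penalty
print(tv_loss(flat), tv_loss(noisy))
```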
[
[
"### 损失函数\n\n[**风格转移的损失函数是内容损失、风格损失和总变化损失的加权和**]。\n通过调节这些权值超参数,我们可以权衡合成图像在保留内容、迁移样式以及降噪三方面的相对重要性。\n",
"_____no_output_____"
]
],
[
[
"content_weight, style_weight, tv_weight = 1, 1e3, 10\n\ndef compute_loss(X, contents_Y_hat, styles_Y_hat, contents_Y, styles_Y_gram):\n # 分别计算内容损失、样式损失和总变差损失\n contents_l = [content_loss(Y_hat, Y) * content_weight for Y_hat, Y in zip(\n contents_Y_hat, contents_Y)]\n styles_l = [style_loss(Y_hat, Y) * style_weight for Y_hat, Y in zip(\n styles_Y_hat, styles_Y_gram)]\n tv_l = tv_loss(X) * tv_weight\n # 对所有损失求和\n l = sum(10 * styles_l + contents_l + [tv_l])\n return contents_l, styles_l, tv_l, l",
"_____no_output_____"
]
],
[
[
"## [**初始化合成图像**]\n\n在样式迁移中,合成的图像是训练期间唯一需要更新的变量。因此,我们可以定义一个简单的模型 `SynthesizedImage`,并将合成的图像视为模型参数。模型的前向计算只需返回模型参数即可。\n",
"_____no_output_____"
]
],
[
[
"class SynthesizedImage(nn.Module):\n def __init__(self, img_shape, **kwargs):\n super(SynthesizedImage, self).__init__(**kwargs)\n self.weight = nn.Parameter(torch.rand(*img_shape))\n\n def forward(self):\n return self.weight",
"_____no_output_____"
]
],
[
[
"下面,我们定义 `get_inits` 函数。该函数创建了合成图像的模型实例,并将其初始化为图像 `X` 。样式图像在各个样式层的格拉姆矩阵 `styles_Y_gram` 将在训练前预先计算好。\n",
"_____no_output_____"
]
],
[
[
"def get_inits(X, device, lr, styles_Y):\n gen_img = SynthesizedImage(X.shape).to(device)\n gen_img.weight.data.copy_(X.data)\n trainer = torch.optim.Adam(gen_img.parameters(), lr=lr)\n styles_Y_gram = [gram(Y) for Y in styles_Y]\n return gen_img(), styles_Y_gram, trainer",
"_____no_output_____"
]
],
[
[
"## [**训练模型**]\n\n在训练模型进行样式迁移时,我们不断抽取合成图像的内容特征和样式特征,然后计算损失函数。下面定义了训练循环。\n",
"_____no_output_____"
]
],
[
[
"def train(X, contents_Y, styles_Y, device, lr, num_epochs, lr_decay_epoch):\n X, styles_Y_gram, trainer = get_inits(X, device, lr, styles_Y)\n scheduler = torch.optim.lr_scheduler.StepLR(trainer, lr_decay_epoch, 0.8)\n animator = d2l.Animator(xlabel='epoch', ylabel='loss',\n xlim=[10, num_epochs],\n legend=['content', 'style', 'TV'],\n ncols=2, figsize=(7, 2.5))\n for epoch in range(num_epochs):\n trainer.zero_grad()\n contents_Y_hat, styles_Y_hat = extract_features(\n X, content_layers, style_layers)\n contents_l, styles_l, tv_l, l = compute_loss(\n X, contents_Y_hat, styles_Y_hat, contents_Y, styles_Y_gram)\n l.backward()\n trainer.step()\n scheduler.step()\n if (epoch + 1) % 10 == 0:\n animator.axes[1].imshow(postprocess(X))\n animator.add(epoch + 1, [float(sum(contents_l)),\n float(sum(styles_l)), float(tv_l)])\n return X",
"_____no_output_____"
]
],
[
[
"现在我们[**训练模型**]:\n首先将内容图像和样式图像的高和宽分别调整为300和450像素,用内容图像来初始化合成图像。\n",
"_____no_output_____"
]
],
[
[
"device, image_shape = d2l.try_gpu(), (300, 450)\nnet = net.to(device)\ncontent_X, contents_Y = get_contents(image_shape, device)\n_, styles_Y = get_styles(image_shape, device)\noutput = train(content_X, contents_Y, styles_Y, device, 0.3, 500, 50)",
"_____no_output_____"
]
],
[
[
"我们可以看到,合成图像保留了内容图像的风景和物体,并同时迁移了样式图像的色彩。例如,合成图像具有与样式图像中一样的色彩块,其中一些甚至具有画笔笔触的细微纹理。 \n\n## 小结\n\n* 样式迁移常用的损失函数由3部分组成:(i) 内容损失使合成图像与内容图像在内容特征上接近;(ii) 样式损失令合成图像与样式图像在样式特征上接近;(iii) 总变差损失则有助于减少合成图像中的噪点。\n* 我们可以通过预训练的卷积神经网络来抽取图像的特征,并通过最小化损失函数来不断更新合成图像来作为模型参数。\n* 我们使用格拉姆矩阵表达样式层输出的样式。\n\n## 练习\n\n1. 选择不同的内容和样式层,输出有什么变化?\n1. 调整损失函数中的权值超参数。输出是否保留更多内容或减少更多噪点?\n1. 替换实验中的内容图像和样式图像,你能创作出更有趣的合成图像吗?\n1. 我们可以对文本使用样式迁移吗?提示:你可以参阅调查报告 :cite:`Hu.Lee.Aggarwal.2020`。\n",
"_____no_output_____"
],
[
"[Discussions](https://discuss.d2l.ai/t/3300)\n",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
]
] |
4a65194e91b5c55deae1716257f513966d552d1a
| 8,522 |
ipynb
|
Jupyter Notebook
|
src/PyTorch/generate.ipynb
|
nikunjlad/Text-Generation-using-LSTMs
|
f58285e5b70f2c36241221dc38c39c46a7b0da1f
|
[
"MIT"
] | null | null | null |
src/PyTorch/generate.ipynb
|
nikunjlad/Text-Generation-using-LSTMs
|
f58285e5b70f2c36241221dc38c39c46a7b0da1f
|
[
"MIT"
] | null | null | null |
src/PyTorch/generate.ipynb
|
nikunjlad/Text-Generation-using-LSTMs
|
f58285e5b70f2c36241221dc38c39c46a7b0da1f
|
[
"MIT"
] | null | null | null | 29.085324 | 106 | 0.525111 |
[
[
[
"!pip install -U -q pyDrive\nfrom pydrive.auth import GoogleAuth\nfrom pydrive.drive import GoogleDrive\nfrom google.colab import auth\nfrom oauth2client.client import GoogleCredentials",
"_____no_output_____"
],
[
"auth.authenticate_user()\ngauth = GoogleAuth()\ngauth.credentials = GoogleCredentials.get_application_default()\ndrive = GoogleDrive(gauth)",
"_____no_output_____"
],
[
"import os\nos.listdir()\nos.chdir(\"drive/My Drive/Colab Notebooks/Text-Generation/PyTorch/\")",
"_____no_output_____"
],
[
"import argparse\nimport torch\nimport data",
"_____no_output_____"
],
[
"# parser = argparse.ArgumentParser(description='PyTorch Wikitext-2 Language Model')\n\n# # Model parameters.\n# parser.add_argument('--data', type=str, default='./data/wikitext-2',\n# help='location of the data corpus')\n# parser.add_argument('--checkpoint', type=str, default='./model.pt',\n# help='model checkpoint to use')\n# parser.add_argument('--outf', type=str, default='generated.txt',\n# help='output file for generated text')\n# parser.add_argument('--words', type=int, default='1000',\n# help='number of words to generate')\n# parser.add_argument('--seed', type=int, default=1111,\n# help='random seed')\n# parser.add_argument('--cuda', action='store_true',\n# help='use CUDA')\n# parser.add_argument('--temperature', type=float, default=1.0,\n# help='temperature - higher will increase diversity')\n# parser.add_argument('--log-interval', type=int, default=100,\n# help='reporting interval')\n# args = parser.parse_args()\n\nargs = {}\nargs[\"data\"] = \"./data/paul_graham/\"\nargs[\"checkpoint\"] = \"./model3.pt\"\nargs[\"outf\"] = \"generated3.txt\"\nargs[\"words\"] = 1000\nargs[\"seed\"] = 1111\nargs[\"cuda\"] = True\nargs[\"temperature\"] = 1.0\nargs[\"log_interval\"] = 100",
"_____no_output_____"
],
[
"# Set the random seed manually for reproducibility.\ntorch.manual_seed(args[\"seed\"])\nif torch.cuda.is_available():\n if not args[\"cuda\"]:\n print(\"WARNING: You have a CUDA device, so you should probably run with --cuda\")\n\ndevice = torch.device(\"cuda\" if args[\"cuda\"] else \"cpu\")",
"_____no_output_____"
],
[
"if args[\"temperature\"] < 1e-3:\n parser.error(\"--temperature has to be greater or equal 1e-3\")\n\nwith open(args[\"checkpoint\"], 'rb') as f:\n model = torch.load(f).to(device)\nmodel.eval()",
"_____no_output_____"
],
[
"corpus = data.Corpus(args[\"data\"])\nntokens = len(corpus.dictionary)\n\nis_transformer_model = hasattr(model, 'model_type') and model.model_type == 'Transformer'\nif not is_transformer_model:\n hidden = model.init_hidden(1)\ninput = torch.randint(ntokens, (1, 1), dtype=torch.long).to(device)\n\nwith open(args[\"outf\"], 'w') as outf:\n with torch.no_grad(): # no tracking history\n for i in range(args[\"words\"]):\n if is_transformer_model:\n output = model(input, False)\n word_weights = output[-1].squeeze().div(args[\"temperature\"]).exp().cpu()\n word_idx = torch.multinomial(word_weights, 1)[0]\n word_tensor = torch.Tensor([[word_idx]]).long().to(device)\n input = torch.cat([input, word_tensor], 0)\n else:\n output, hidden = model(input, hidden)\n word_weights = output.squeeze().div(args[\"temperature\"]).exp().cpu()\n word_idx = torch.multinomial(word_weights, 1)[0]\n input.fill_(word_idx)\n\n word = corpus.dictionary.idx2word[word_idx]\n\n outf.write(word + ('\\n' if i % 20 == 19 else ' '))\n\n if i % args[\"log_interval\"] == 0:\n print('| Generated {}/{} words'.format(i, args[\"words\"]))\n",
"| Generated 0/1000 words\n| Generated 100/1000 words\n| Generated 200/1000 words\n| Generated 300/1000 words\n| Generated 400/1000 words\n| Generated 500/1000 words\n| Generated 600/1000 words\n| Generated 700/1000 words\n| Generated 800/1000 words\n| Generated 900/1000 words\n"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a651ea04225640e458c9aff6bb3a4ddcb07bcd4
| 5,601 |
ipynb
|
Jupyter Notebook
|
docs/_sources/Module3/m3_04.ipynb
|
liuzhengqi1996/math452_Spring2022
|
b01d1d9bee4778b3069e314c775a54f16dd44053
|
[
"MIT"
] | null | null | null |
docs/_sources/Module3/m3_04.ipynb
|
liuzhengqi1996/math452_Spring2022
|
b01d1d9bee4778b3069e314c775a54f16dd44053
|
[
"MIT"
] | null | null | null |
docs/_sources/Module3/m3_04.ipynb
|
liuzhengqi1996/math452_Spring2022
|
b01d1d9bee4778b3069e314c775a54f16dd44053
|
[
"MIT"
] | null | null | null | 45.169355 | 813 | 0.672023 |
[
[
[
"# Deep neural network functions ",
"_____no_output_____"
]
],
[
[
"from IPython.display import IFrame\n\nIFrame(src= \"https://cdnapisec.kaltura.com/p/2356971/sp/235697100/embedIframeJs/uiconf_id/41416911/partner_id/2356971?iframeembed=true&playerId=kaltura_player&entry_id=1_1x52r7v2&flashvars[streamerType]=auto&flashvars[localizationCode]=en&flashvars[leadWithHTML5]=true&flashvars[sideBarContainer.plugin]=true&flashvars[sideBarContainer.position]=left&flashvars[sideBarContainer.clickToClose]=true&flashvars[chapters.plugin]=true&flashvars[chapters.layout]=vertical&flashvars[chapters.thumbnailRotator]=false&flashvars[streamSelector.plugin]=true&flashvars[EmbedPlayer.SpinnerTarget]=videoHolder&flashvars[dualScreen.plugin]=true&flashvars[hotspots.plugin]=1&flashvars[Kaltura.addCrossoriginToIframe]=true&&wid=1_3eff9dla\" ,width='800', height='500')",
"_____no_output_____"
],
[
"IFrame(src=\"https://cdnapisec.kaltura.com/p/2356971/sp/235697100/embedIframeJs/uiconf_id/41416911/partner_id/2356971?iframeembed=true&playerId=kaltura_player&entry_id=1_wwt0aak3&flashvars[streamerType]=auto&flashvars[localizationCode]=en&flashvars[leadWithHTML5]=true&flashvars[sideBarContainer.plugin]=true&flashvars[sideBarContainer.position]=left&flashvars[sideBarContainer.clickToClose]=true&flashvars[chapters.plugin]=true&flashvars[chapters.layout]=vertical&flashvars[chapters.thumbnailRotator]=false&flashvars[streamSelector.plugin]=true&flashvars[EmbedPlayer.SpinnerTarget]=videoHolder&flashvars[dualScreen.plugin]=true&flashvars[hotspots.plugin]=1&flashvars[Kaltura.addCrossoriginToIframe]=true&&wid=1_u1s5u1jt\",width='800', height='500') ",
"_____no_output_____"
]
],
[
[
"## Download the lecture notes here: [Notes](https://sites.psu.edu/math452/files/2022/01/C04_-Deep-neural-network-functions.pdf)",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown"
] |
[
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
]
] |
4a653d1a9463988ddcb7f3d24fa24ff74f15e7bd
| 828,011 |
ipynb
|
Jupyter Notebook
|
patoh-hyperedge-experiments.ipynb
|
sbarakat/algorithmshop-graph-partitioning
|
db575ce585e2de0df4b0d944c24777cabc2146a3
|
[
"MIT"
] | 13 |
2017-03-26T13:47:51.000Z
|
2021-01-29T14:01:30.000Z
|
patoh-hyperedge-experiments.ipynb
|
younes94/graph-partitioning
|
4114325de22cb446a967bcbf373531d5d86bbac5
|
[
"MIT"
] | null | null | null |
patoh-hyperedge-experiments.ipynb
|
younes94/graph-partitioning
|
4114325de22cb446a967bcbf373531d5d86bbac5
|
[
"MIT"
] | 7 |
2017-03-21T14:01:26.000Z
|
2021-07-28T10:26:42.000Z
| 74.141386 | 19,800 | 0.73452 |
[
[
[
"import os\nimport csv\nimport platform\nimport pandas as pd\nimport networkx as nx\nfrom graph_partitioning import GraphPartitioning, utils\n\nrun_metrics = True\n\ncols = [\"WASTE\", \"CUT RATIO\", \"EDGES CUT\", \"TOTAL COMM VOLUME\", \"Qds\", \"CONDUCTANCE\", \"MAXPERM\", \"NMI\", \"FSCORE\", \"FSCORE RELABEL IMPROVEMENT\", \"LONELINESS\"]\n\npwd = %pwd\n\nconfig = {\n \"DATA_FILENAME\": os.path.join(pwd, \"data\", \"predition_model_tests\", \"network\", \"rand_edge_weights\", \"network_1.txt\"),\n #\"DATA_FILENAME\": os.path.join(pwd, \"data\", \"predition_model_tests\", \"network\", \"network_1.txt\"),\n \"OUTPUT_DIRECTORY\": os.path.join(pwd, \"output\"),\n\n # Set which algorithm is run for the PREDICTION MODEL.\n # Either: 'FENNEL' or 'SCOTCH'\n \"PREDICTION_MODEL_ALGORITHM\": \"SCOTCH\",\n\n # Alternativly, read input file for prediction model.\n # Set to empty to generate prediction model using algorithm value above.\n \"PREDICTION_MODEL\": \"\",\n\n \"PARTITIONER_ALGORITHM\": \"SCOTCH\",\n\n # File containing simulated arrivals. This is used in simulating nodes\n # arriving at the shelter. Nodes represented by line number; value of\n # 1 represents a node as arrived; value of 0 represents the node as not\n # arrived or needing a shelter.\n \"SIMULATED_ARRIVAL_FILE\": os.path.join(pwd,\n \"data\",\n \"predition_model_tests\",\n \"dataset_1_shift_rotate\",\n \"simulated_arrival_list\",\n \"percentage_of_prediction_correct_100\",\n \"arrival_100_1.txt\"\n ),\n \n # File containing the prediction of a node arriving. This is different to the\n # simulated arrivals, the values in this file are known before the disaster.\n \"PREDICTION_LIST_FILE\": os.path.join(pwd,\n \"data\",\n \"predition_model_tests\",\n \"dataset_1_shift_rotate\",\n \"prediction_list\",\n \"prediction_1.txt\"\n ),\n\n # File containing the geographic location of each node, in \"x,y\" format.\n \"POPULATION_LOCATION_FILE\": os.path.join(pwd,\n \"data\",\n \"predition_model_tests\",\n \"coordinates\",\n \"coordinates_1.txt\"\n ),\n\n # Number of shelters\n \"num_partitions\": 4,\n\n # The number of iterations when making prediction model\n \"num_iterations\": 1,\n\n # Percentage of prediction model to use before discarding\n # When set to 0, prediction model is discarded, useful for one-shot\n \"prediction_model_cut_off\": 1.0,\n\n # Alpha value used in one-shot (when restream_batches set to 1)\n \"one_shot_alpha\": 0.5,\n\n # Number of arrivals to batch before recalculating alpha and restreaming.\n # When set to 1, one-shot is used with alpha value from above\n \"restream_batches\": 1000,\n\n # When the batch size is reached: if set to True, each node is assigned\n # individually as first in first out. If set to False, the entire batch\n # is processed and empty before working on the next batch.\n \"sliding_window\": False,\n\n # Create virtual nodes based on prediction model\n \"use_virtual_nodes\": False,\n\n # Virtual nodes: edge weight\n \"virtual_edge_weight\": 1.0,\n\n # Loneliness score parameter. Used when scoring a partition by how many\n # lonely nodes exist.\n \"loneliness_score_param\": 1.2,\n\n ####\n # GRAPH MODIFICATION FUNCTIONS\n\n # Also enables the edge calculation function.\n \"graph_modification_functions\": True,\n\n # If set, the node weight is set to 100 if the node arrives at the shelter,\n # otherwise the node is removed from the graph.\n \"alter_arrived_node_weight_to_100\": False,\n\n # Uses generalized additive models from R to generate prediction of nodes not\n # arrived. 
This sets the node weight on unarrived nodes the the prediction\n # given by a GAM.\n # Needs POPULATION_LOCATION_FILE to be set.\n \"alter_node_weight_to_gam_prediction\": False,\n\n # Enables edge expansion when graph_modification_functions is set to true\n \"edge_expansion_enabled\": True,\n\n # The value of 'k' used in the GAM will be the number of nodes arrived until\n # it reaches this max value.\n \"gam_k_value\": 100,\n\n # Alter the edge weight for nodes that haven't arrived. This is a way to\n # de-emphasise the prediction model for the unknown nodes.\n \"prediction_model_emphasis\": 1.0,\n \n # This applies the prediction_list_file node weights onto the nodes in the graph\n # when the prediction model is being computed and then removes the weights\n # for the cutoff and batch arrival modes\n \"apply_prediction_model_weights\": True,\n\n \"SCOTCH_LIB_PATH\": os.path.join(pwd, \"libs/scotch/macOS/libscotch.dylib\")\n if 'Darwin' in platform.system()\n else \"/usr/local/lib/libscotch.so\",\n \n # Path to the PaToH shared library\n \"PATOH_LIB_PATH\": os.path.join(pwd, \"libs/patoh/lib/macOS/libpatoh.dylib\")\n if 'Darwin' in platform.system()\n else os.path.join(pwd, \"libs/patoh/lib/linux/libpatoh.so\"),\n \n \"PATOH_ITERATIONS\": 10,\n \n # Expansion modes: 'no_expansion', 'avg_node_weight', 'total_node_weight', 'smallest_node_weight'\n # 'largest_node_weight', 'product_node_weight'\n # add '_squared' or '_sqrt' at the end of any of the above for ^2 or sqrt(weight)\n # add '_complete' for applying the complete algorithm\n # for hyperedge with weights: A, B, C, D\n # new weights are computed\n # (A*B)^2 = H0\n # (A*C)^2 = H1, ... Hn-1\n # then normal hyperedge expansion computed on H0...Hn-1\n # i.e. 'avg_node_weight_squared\n \"PATOH_HYPEREDGE_EXPANSION_MODE\": 'total_node_weight_sqrt_complete',\n\n # Alters how much information to print. Keep it at 1 for this notebook.\n # 0 - will print nothing, useful for batch operations.\n # 1 - prints basic information on assignments and operations.\n # 2 - prints more information as it batches arrivals.\n \"verbose\": 1\n}\n\n#gp = GraphPartitioning(config)\n\n# Optional: shuffle the order of nodes arriving\n# Arrival order should not be shuffled if using GAM to alter node weights\n#random.shuffle(gp.arrival_order)\n\n%pylab inline",
"Populating the interactive namespace from numpy and matplotlib\n"
],
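The config comments above describe how a hyperedge's node weights are collapsed into a single weight for the various `PATOH_HYPEREDGE_EXPANSION_MODE` values. The sketch below only illustrates that description with a hypothetical helper (`expanded_hyperedge_weight`); it is not the implementation inside the graph_partitioning package, and the pairwise `_complete` variant described in the comments is omitted for brevity.

```python
import math

def expanded_hyperedge_weight(node_weights, mode):
    """Illustrative only: combine a hyperedge's node weights into one
    edge weight following the modes named in the config comments."""
    if mode == 'no_expansion':
        return 1.0
    base, _, modifier = mode.partition('_node_weight')
    w = {'avg': sum(node_weights) / len(node_weights),
         'total': sum(node_weights),
         'smallest': min(node_weights),
         'largest': max(node_weights),
         'product': math.prod(node_weights)}[base]
    if '_squared' in modifier:
        w = w ** 2
    elif '_sqrt' in modifier:
        w = math.sqrt(w)
    return w

# Hyperedge with node weights A, B, C, D under one of the config modes.
print(expanded_hyperedge_weight([1.0, 2.0, 3.0, 4.0], 'total_node_weight_sqrt'))
```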
[
"import scipy\n\niterations = 1000\n#modes = ['product_node_weight_complete_sqrt']\nmodes = ['no_expansion', 'avg_node_weight_complete', 'total_node_weight_complete', 'smallest_node_weight_complete','largest_node_weight_complete']\n#modes = ['no_expansion']\n\nfor mode in modes:\n\n metricsDataPrediction = []\n metricsDataAssign = []\n \n dataQdsOv = []\n dataCondOv = []\n\n config['PATOH_HYPEREDGE_EXPANSION_MODE'] = mode\n print('Mode', mode)\n for i in range(0, iterations):\n if (i % 50) == 0:\n print('Mode', mode, 'Iteration', str(i))\n \n config[\"DATA_FILENAME\"] = os.path.join(pwd, \"data\", \"predition_model_tests\", \"network\", \"network_\" + str(i + 1) + \".txt\")\n \n gp = GraphPartitioning(config)\n gp.verbose = 0\n gp.load_network()\n gp.init_partitioner()\n\n m = gp.prediction_model()\n metricsDataPrediction.append(m[0])\n\n '''\n\n #write_graph_files\n #\n gp.metrics_timestamp = datetime.datetime.now().strftime('%H%M%S')\n f,_ = os.path.splitext(os.path.basename(gp.DATA_FILENAME))\n gp.metrics_filename = f + \"-\" + gp.metrics_timestamp\n\n\n if not os.path.exists(gp.OUTPUT_DIRECTORY):\n os.makedirs(gp.OUTPUT_DIRECTORY)\n if not os.path.exists(os.path.join(gp.OUTPUT_DIRECTORY, 'oslom')):\n os.makedirs(os.path.join(gp.OUTPUT_DIRECTORY, 'oslom'))\n\n \n file_oslom = os.path.join(gp.OUTPUT_DIRECTORY, 'oslom', \"{}-all\".format(gp.metrics_filename) + '-edges-oslom.txt')\n with open(file_oslom, \"w\") as outf:\n for e in gp.G.edges_iter(data=True):\n outf.write(\"{}\\t{}\\t{}\\n\".format(e[0], e[1], e[2][\"weight\"]))\n \n #file_oslom = utils.write_graph_files(gp.OUTPUT_DIRECTORY,\n # \"{}-all\".format(gp.metrics_filename),\n # gp.G,\n # quiet=True)\n\n community_metrics = utils.run_community_metrics(gp.OUTPUT_DIRECTORY,\n \"{}-all\".format(gp.metrics_filename),\n file_oslom)\n \n dataQdsOv.append(float(community_metrics['Qds']))\n dataCondOv.append(float(community_metrics['conductance']))\n '''\n\n\n ec = ''\n tcv = ''\n qds = ''\n conductance = ''\n maxperm = ''\n nmi = ''\n lonliness = ''\n \n qdsOv = ''\n condOv = ''\n\n dataEC = []\n dataTCV = [] \n dataQDS = []\n dataCOND = []\n dataMAXPERM = []\n dataNMI = []\n dataLonliness = []\n\n \n for i in range(0, iterations):\n dataEC.append(metricsDataPrediction[i][2])\n dataTCV.append(metricsDataPrediction[i][3])\n dataQDS.append(metricsDataPrediction[i][4])\n dataCOND.append(metricsDataPrediction[i][5])\n dataMAXPERM.append(metricsDataPrediction[i][6])\n dataNMI.append(metricsDataPrediction[i][7]) \n dataLonliness.append(metricsDataPrediction[i][10])\n\n\n # UNCOMMENT FOR BATCH ARRIVAL\n #dataECB.append(metricsDataAssign[i][2])\n #dataTCVB.append(metricsDataAssign[i][3])\n\n if(len(ec)):\n ec = ec + ','\n ec = ec + str(metricsDataPrediction[i][2])\n if(len(tcv)):\n tcv = tcv + ','\n tcv = tcv + str(metricsDataPrediction[i][3])\n\n if(len(qds)):\n qds = qds + ','\n qds = qds + str(metricsDataPrediction[i][4])\n\n if(len(conductance)):\n conductance = conductance + ','\n conductance = conductance + str(metricsDataPrediction[i][5])\n\n if(len(maxperm)):\n maxperm = maxperm + ','\n maxperm = maxperm + str(metricsDataPrediction[i][6])\n\n if(len(nmi)):\n nmi = nmi + ','\n nmi = nmi + str(metricsDataPrediction[i][7])\n\n if(len(lonliness)):\n lonliness = lonliness + ','\n lonliness = lonliness + str(dataLonliness[i])\n \n '''\n if(len(qdsOv)):\n qdsOv = qdsOv + ','\n qdsOv = qdsOv + str(dataQdsOv[i])\n\n if(len(condOv)):\n condOv = condOv + ','\n condOv = condOv + str(dataCondOv[i])\n '''\n\n ec = 'EC_PM,' + 
config['PATOH_HYPEREDGE_EXPANSION_MODE'] + ',' + str(scipy.mean(dataEC)) + ',' + str(scipy.std(dataEC)) + ',' + ec\n tcv = 'TCV_PM,' + config['PATOH_HYPEREDGE_EXPANSION_MODE'] + ',' + str(scipy.mean(dataTCV)) + ',' + str(scipy.std(dataTCV)) + ',' + tcv\n\n lonliness = \"LONELINESS,\" + config['PATOH_HYPEREDGE_EXPANSION_MODE'] + ',' + str(scipy.mean(dataLonliness)) + ',' + str(scipy.std(dataLonliness)) + ',' + lonliness\n \n qds = 'QDS_PM,' + config['PATOH_HYPEREDGE_EXPANSION_MODE'] + ',' + str(scipy.mean(dataQDS)) + ',' + str(scipy.std(dataQDS)) + ',' + qds\n conductance = 'CONDUCTANCE_PM,' + config['PATOH_HYPEREDGE_EXPANSION_MODE'] + ',' + str(scipy.mean(dataCOND)) + ',' + str(scipy.std(dataCOND)) + ',' + conductance\n maxperm = 'MAXPERM_PM,' + config['PATOH_HYPEREDGE_EXPANSION_MODE'] + ',' + str(scipy.mean(dataMAXPERM)) + ',' + str(scipy.std(dataMAXPERM)) + ',' + maxperm\n nmi = 'NMI_PM,' + config['PATOH_HYPEREDGE_EXPANSION_MODE'] + ',' + str(scipy.mean(dataNMI)) + ',' + str(scipy.std(dataNMI)) + ',' + nmi\n\n #qdsOv = 'QDS_OV,' + config['PATOH_HYPEREDGE_EXPANSION_MODE'] + ',' + str(scipy.mean(dataQdsOv)) + ',' + str(scipy.std(dataQdsOv)) + qdsOv\n #condOv = 'CONDUCTANCE_OV,' + config['PATOH_HYPEREDGE_EXPANSION_MODE'] + ',' + str(scipy.mean(dataCondOv)) + ',' + str(scipy.std(dataCondOv)) + condOv\n\n print(ec)\n print(tcv)\n print(lonliness)\n print(qds)\n print(conductance)\n print(maxperm)\n #print(qdsOv)\n #print(condOv)",
"Mode no_expansion\nMode no_expansion Iteration 0\nMode no_expansion Iteration 50\nMode no_expansion Iteration 100\nMode no_expansion Iteration 150\nMode no_expansion Iteration 200\nMode no_expansion Iteration 250\nMode no_expansion Iteration 300\nMode no_expansion Iteration 350\nMode no_expansion Iteration 400\nMode no_expansion Iteration 450\nMode no_expansion Iteration 500\nMode no_expansion Iteration 550\nMode no_expansion Iteration 600\nMode no_expansion Iteration 650\nMode no_expansion Iteration 700\nMode no_expansion Iteration 750\nMode no_expansion Iteration 800\nMode no_expansion Iteration 850\nMode no_expansion Iteration 900\nMode no_expansion Iteration 950\nEC_PM,no_expansion,109.441,14.2816147196,138,113,94,105,131,97,101,86,110,112,91,114,96,118,132,83,99,110,114,125,97,98,105,128,125,110,102,125,127,121,135,114,108,119,113,103,103,112,107,92,109,118,115,93,86,101,85,114,115,103,114,101,122,116,110,156,89,119,116,93,95,95,100,105,120,107,104,113,86,86,132,102,134,103,123,109,86,96,90,111,99,101,103,84,107,92,116,127,122,111,125,115,124,102,125,96,107,115,112,109,115,131,108,108,104,116,101,128,93,123,123,99,102,104,116,121,109,110,133,100,111,101,111,123,120,90,109,96,120,110,112,99,112,81,120,137,114,100,110,154,133,115,131,100,105,105,120,103,97,121,124,117,114,100,108,102,110,109,113,80,106,105,114,149,102,109,132,124,108,89,119,94,126,92,127,104,106,105,98,124,93,92,90,107,114,128,115,110,115,108,101,116,100,112,95,105,97,104,89,115,93,104,108,114,116,93,100,98,121,99,103,88,120,103,88,109,94,128,143,129,124,95,84,109,128,93,102,104,105,104,108,120,106,111,111,123,118,130,118,118,102,105,99,114,122,98,117,116,98,100,103,127,86,103,131,108,116,107,127,111,104,93,106,94,112,143,134,99,115,125,117,106,113,114,101,106,117,97,139,98,107,113,107,99,113,110,123,95,132,102,93,122,125,82,93,114,113,108,101,98,123,110,138,107,109,103,99,101,118,108,114,105,99,94,117,115,104,108,113,113,102,107,134,79,93,120,91,121,92,112,112,98,114,125,90,116,95,112,117,109,119,129,119,111,138,113,111,110,104,119,103,118,96,123,108,95,112,100,107,111,95,110,112,108,123,120,109,110,139,125,86,104,106,135,100,94,136,84,88,104,111,135,98,116,96,112,88,100,95,92,108,117,121,122,114,82,107,143,101,106,125,98,124,103,120,103,87,132,110,135,122,92,100,142,132,92,112,110,138,94,110,93,109,110,116,96,109,118,92,84,118,95,108,132,108,101,107,122,87,152,103,129,121,102,118,113,130,115,157,106,107,113,126,101,106,96,139,114,123,120,106,115,91,110,106,94,104,109,124,113,111,105,118,137,98,122,91,113,113,102,126,109,104,105,108,98,109,82,125,128,108,119,98,98,94,93,114,89,97,76,119,106,90,98,98,94,90,110,117,110,104,131,91,117,95,122,100,105,89,108,129,91,91,113,84,103,133,138,115,123,131,117,90,111,95,119,115,88,111,92,126,118,127,120,105,104,103,110,118,138,141,84,89,123,114,134,124,114,102,102,107,107,108,129,101,106,105,97,101,103,112,143,91,118,102,125,97,90,72,86,107,107,109,106,92,127,103,106,101,127,101,128,106,107,127,99,103,105,107,112,141,118,104,105,114,111,114,85,89,107,121,108,131,123,106,121,101,68,138,123,91,114,87,121,128,89,112,129,78,130,111,95,128,129,96,112,106,115,87,107,114,101,133,93,109,135,103,114,113,102,112,94,118,107,91,115,122,115,114,113,105,123,103,94,107,124,119,106,115,121,119,107,95,95,139,110,107,106,118,112,118,113,123,108,118,121,103,128,102,108,101,90,120,114,97,140,99,124,109,128,89,110,86,109,115,94,118,102,105,118,97,111,115,104,128,110,93,116,124,109,94,137,96,118,105,103,108,114,127,132,118,120,99,116,116,114,99,119,112,80,105,127,108,117,97,83,86,140,109,101,95,121,116,
109,108,114,136,116,108,92,84,95,88,118,103,99,114,118,127,110,137,87,109,129,100,117,122,121,108,101,83,122,118,122,101,105,99,113,104,96,129,122,90,107,107,117,105,118,100,97,87,117,127,100,104,123,131,108,144,102,128,125,134,146,114,96,130,117,117,107,98,110,105,96,88,96,108,120,109,102,126,111,124,99,100,86,97,120,109,106,104,95,112,101,107,97,113,108,95,99,126,116,72,120,94,101,103,93,113,108,128,97,98,81,129,93,144,96,87,97,105,100,88,127,92,110,100,85,124,100,123,134,123,116,140,81,119,95,81,97,112,99,128,95,117,104,110,106,104,109,124,100,92,98,114,98,130,128,94,110,115,113,81,117,131,96,107,129,110,105,100,96,122,126,101,109,113,105,88,137,89,110,122,109,106,109,134,125,118,111,143,93,109,113,119,122,98,96,91,92,133,107,155,125,141,79,91,108,94,115,111,88,94,110,106,122,109,141,137,118,166,97,99,110,123,119,118,114,82,71,75,98,98,118,102,93,133,77,96,102,85,124,116,100,111,120,118,108,108,85\nTCV_PM,no_expansion,144.409,12.945335801,159,154,134,138,151,130,147,123,138,154,130,149,132,150,146,128,147,136,146,142,140,139,143,158,157,154,140,149,150,151,160,157,153,148,152,134,143,143,144,128,140,156,158,137,121,147,115,142,160,139,163,139,153,147,142,175,133,153,144,133,136,142,126,139,145,135,142,156,127,123,157,138,161,145,165,148,132,123,130,135,122,146,130,115,138,132,156,155,160,147,165,152,153,136,154,132,136,158,141,147,151,154,145,146,144,141,144,150,139,171,158,136,145,146,162,152,140,135,176,141,141,131,150,152,158,128,152,132,154,149,147,135,154,124,148,163,143,125,134,180,157,155,177,136,140,148,153,147,148,160,165,157,145,146,146,140,147,147,133,115,149,148,145,168,138,140,172,155,148,124,153,137,149,133,153,125,154,138,144,147,131,136,132,136,159,149,157,133,151,146,139,152,138,157,131,150,131,131,132,144,137,153,149,155,141,135,147,135,153,125,147,126,149,140,133,145,143,161,163,148,165,134,131,146,151,137,139,142,135,134,159,161,131,144,150,162,142,151,162,148,142,139,135,126,151,143,136,132,135,126,144,155,123,147,150,151,137,141,173,150,146,128,142,136,142,166,166,147,147,154,153,153,140,139,146,140,153,138,160,144,149,148,147,131,153,142,155,137,163,151,131,154,158,118,136,156,140,138,129,141,154,152,177,146,157,141,130,126,146,144,134,145,133,139,157,138,149,128,156,164,138,139,153,123,124,132,126,146,129,146,151,141,155,163,132,146,135,156,151,134,148,150,152,152,165,151,139,147,147,150,140,155,140,150,152,142,159,144,138,143,141,150,152,144,143,160,158,141,177,155,127,147,143,163,132,134,160,126,114,149,150,169,136,147,144,141,135,140,134,122,135,150,150,153,150,118,145,179,132,141,161,132,159,127,142,140,125,155,163,175,147,128,130,171,158,125,143,145,175,130,144,127,163,148,161,138,150,153,142,115,154,131,141,168,148,145,139,153,127,176,129,168,155,143,147,143,172,138,177,147,143,133,173,136,139,138,175,137,149,154,137,154,122,146,142,135,143,137,156,146,143,155,158,173,137,154,130,147,148,137,171,151,143,146,143,134,146,117,151,161,141,155,143,133,132,139,149,120,131,115,149,142,139,145,134,133,129,158,161,146,134,162,124,150,137,158,139,135,128,143,164,126,142,150,119,138,155,171,158,161,175,151,120,144,130,138,160,114,135,129,169,148,161,163,142,141,137,139,141,151,163,123,139,164,152,159,142,145,144,130,150,141,144,150,139,142,131,131,131,146,140,162,136,147,131,163,146,128,99,120,147,148,150,152,143,167,149,141,132,153,140,162,140,149,159,143,133,139,144,137,167,155,137,130,134,148,142,125,135,142,143,140,144,146,147,152,145,99,171,149,138,149,128,161,162,137,134,147,106,151,143,137,154,173,125,148,144,150,122,130,153,146,161,128,141,174,148,147,140,144,1
47,138,152,147,125,148,165,130,141,154,136,146,141,137,147,159,151,141,152,159,145,153,146,134,172,145,140,133,157,156,160,148,148,145,158,160,139,155,142,134,135,129,144,149,131,150,138,167,148,161,131,144,122,138,157,134,160,138,136,144,130,155,158,150,148,144,123,146,160,142,129,160,133,132,142,133,149,154,168,155,153,158,134,140,148,145,128,146,160,122,125,160,142,162,131,122,125,162,139,143,132,145,152,141,133,147,155,145,144,128,119,135,120,159,142,143,149,159,161,151,169,128,157,167,128,145,147,160,145,141,120,161,160,161,133,147,138,148,151,130,169,157,133,135,151,154,146,150,146,140,122,148,166,142,138,153,152,139,176,139,180,145,159,177,146,132,162,141,153,138,125,145,144,134,125,130,138,151,145,124,156,153,165,130,140,130,132,162,148,140,127,132,143,147,131,138,166,141,140,143,150,144,112,154,138,128,136,141,138,137,166,129,141,122,155,134,181,139,126,132,133,135,128,153,123,151,131,134,139,133,141,164,147,154,168,128,159,135,119,139,151,134,161,132,158,139,148,141,152,143,153,133,132,139,153,132,158,143,133,136,161,140,110,143,153,125,135,153,139,149,132,128,148,162,149,148,159,142,126,160,127,137,177,134,141,153,168,139,160,151,151,123,142,155,154,147,140,133,129,136,158,139,170,156,145,121,133,135,129,152,141,124,135,149,143,159,143,172,164,142,181,137,130,142,155,156,157,141,110,107,116,133,143,150,145,137,172,113,130,139,124,156,140,149,137,151,146,142,157,125\nLONELINESS,no_expansion,0.859394200636,0.00308889523405,0.856182730907,0.857263252071,0.857222688315,0.861082332504,0.856251994196,0.855592146161,0.860378295672,0.863955183662,0.861650228786,0.857253125941,0.859754179406,0.86001523796,0.858664213273,0.860028308455,0.860602633367,0.863053812707,0.860246288133,0.849599353288,0.860971539626,0.860685256705,0.856978957079,0.854681733818,0.855384577255,0.856945482938,0.863353286318,0.859175495511,0.854658903106,0.850919142785,0.853976402168,0.861735504607,0.856613440459,0.865331234169,0.858201817741,0.867259987011,0.857873493145,0.858617143361,0.86012302601,0.852654369176,0.856748947002,0.861279394242,0.854461079951,0.858466316867,0.858443708803,0.856898531023,0.856223752734,0.86279563522,0.862563012841,0.861617897519,0.853787217121,0.860756502561,0.85870623308,0.859567691943,0.859288339905,0.858373484316,0.861272757656,0.85968569883,0.854980929041,0.860325593575,0.858667636331,0.858996075583,0.859593500256,0.85804119328,0.857185618406,0.860493909545,0.861494971926,0.857614932665,0.858214607725,0.850748878591,0.855553557149,0.857989565938,0.851904985076,0.853633725956,0.860729340598,0.85888651882,0.857049533457,0.85663202273,0.857004262919,0.860498304605,0.856205750745,0.858948295695,0.862032357157,0.860716591271,0.855972117447,0.862041118191,0.85432572499,0.855853284852,0.861504161872,0.858169592453,0.855207973307,0.858223025096,0.862456380107,0.857292137445,0.862345838347,0.859397360198,0.861447685434,0.864868470286,0.865767248544,0.856793743392,0.856722527888,0.856878247718,0.862988703785,0.866033023086,0.858768185137,0.863518345279,0.850546021214,0.859074220272,0.862374365072,0.859521069815,0.865700490027,0.863870093648,0.86192775726,0.858563481148,0.858300422519,0.86529086002,0.858165949813,0.859875257251,0.859149780116,0.858395414453,0.855516542076,0.862067605203,0.86148977491,0.863871987746,0.86162817364,0.855594178984,0.856608404534,0.867042734781,0.857689087754,0.865163287287,0.858381271608,0.860344700889,0.862656057042,0.860401398214,0.856745634396,0.863220029637,0.862140315508,0.859301185639,0.857895586125,0.863973945344,0.866069631432,0.858759584618,0.8583960306
QDS_PM,no_expansion,0.368601614726,0.0124496098786
CONDUCTANCE_PM,no_expansion,0.158090447083,0.00960524173281
MAXPERM_PM,no_expansion,0.39095714842,0.0163820133327
3784986739999999,0.40162927000000004,0.39282578900000004,0.38017254500000003,0.391306998,0.428528831,0.396440408,0.37653623799999997,0.415151444,0.403615811,0.38626965500000005,0.36641580000000007,0.39345084,0.38445739,0.373628023,0.38657241400000003,0.383218901,0.37721768199999994,0.37823857000000005,0.38390616,0.399557043,0.3950908699999999,0.410574548,0.363314538,0.381568962,0.40455066900000003,0.392863291,0.404087284,0.38533291500000005,0.380769914,0.39189346799999997,0.411285824,0.382901166,0.420159718,0.403602129,0.36505614,0.3913659460000001,0.398502322,0.381806331,0.40710651000000003,0.400610402,0.39499084200000006,0.39159394999999997,0.36228911199999997,0.39231278499999994,0.39223512899999996,0.36640964900000006,0.395044018,0.37912798600000003,0.396193158,0.40353103199999996,0.38159137699999995,0.400135795,0.386973792,0.378072506,0.389604679,0.372223753,0.352090702,0.391697741,0.377868852,0.38173639600000003,0.388862212,0.37023832700000003,0.417849055,0.389134253,0.380680386,0.375306255,0.388084857,0.422267328,0.393599358,0.39136129900000005,0.395549868,0.39652475000000004,0.40566414399999995,0.38842607599999995,0.389795163,0.370150624,0.379311691,0.3819311829999999,0.38427551400000004,0.39092078899999994,0.417053501,0.377303781,0.36670627199999994,0.40965581199999995,0.37845616199999993,0.40574446399999997,0.40290639999999994,0.40109993799999993,0.393867048,0.36524970400000006,0.41076289599999993,0.374864099,0.36266882399999995,0.363525711,0.37753034,0.397259059,0.386024192,0.391091771,0.388162228,0.397703767,0.392337197,0.357814379,0.38754209900000003,0.365808942,0.381447884,0.36648032599999997,0.411501853,0.367660935,0.39394371,0.387181889,0.392191328,0.40178301,0.365622914,0.36229967199999996,0.40127416699999996,0.39424604399999996,0.40969139500000007,0.37095925799999996,0.390058693,0.384296904,0.417123067,0.38175670800000006,0.39878774499999997,0.379640308,0.445281844,0.389126036,0.37383434899999995,0.41628192399999997,0.408549197,0.41721376099999996,0.39961456900000003,0.40615855300000003,0.38495763400000005,0.392775375,0.37921220499999997,0.413844564,0.40172498300000004,0.37864684899999995,0.402450322,0.361169863,0.38912751399999995,0.40912957599999994,0.396086128,0.39814432,0.40137169800000005,0.36485855700000003,0.381033396,0.402609607,0.370385068,0.42062898399999993,0.386691824,0.35369660300000005,0.392292778,0.39043692199999996,0.386576294,0.37054139899999994,0.37435949500000004,0.373446536,0.366190416,0.37077754900000004,0.37915335299999997,0.370045568,0.383354561,0.39868647100000004,0.396217484,0.389255215,0.41621147499999994,0.379898523,0.39447686,0.38909630599999995,0.370446906,0.385576074,0.40021252100000004,0.39234599400000003,0.40089630699999995,0.385027655,0.400917508,0.374828848,0.420610349,0.36022723300000004,0.406190645,0.38830324800000005,0.39087327400000005,0.40375280399999997,0.373553573,0.41152645800000004,0.383422601,0.400496101,0.412506786,0.399674487,0.40605941599999995,0.37886349,0.397169798,0.379568581,0.37045706100000003,0.40374434299999995,0.37642180099999994,0.36843467900000004,0.428132575,0.40287964899999995,0.405206341,0.388666517,0.40576320499999996,0.379900425,0.37387951699999994,0.40077513699999995,0.402402502,0.412111286,0.399741438,0.37141884500000005,0.399222701,0.392376672,0.386250794,0.381173703,0.38494667600000004,0.372980227,0.404271166,0.377763245,0.392710028,0.40451876400000003\nMode avg_node_weight_complete\nMode avg_node_weight_complete Iteration 0\nMode avg_node_weight_complete Iteration 50\nMode avg_node_weight_complete Iteration 
100\nMode avg_node_weight_complete Iteration 150\nMode avg_node_weight_complete Iteration 200\nMode avg_node_weight_complete Iteration 250\nMode avg_node_weight_complete Iteration 300\nMode avg_node_weight_complete Iteration 350\nMode avg_node_weight_complete Iteration 400\nMode avg_node_weight_complete Iteration 450\nMode avg_node_weight_complete Iteration 500\nMode avg_node_weight_complete Iteration 550\nMode avg_node_weight_complete Iteration 600\nMode avg_node_weight_complete Iteration 650\nMode avg_node_weight_complete Iteration 700\nMode avg_node_weight_complete Iteration 750\nMode avg_node_weight_complete Iteration 800\nMode avg_node_weight_complete Iteration 850\nMode avg_node_weight_complete Iteration 900\nMode avg_node_weight_complete Iteration 950\nEC_PM,avg_node_weight_complete,210.046,29.2825184026,274,178,219,246,226,176,227,193,217,262,219,219,259,222,231,205,220,203,229,204,214,207,204,243,236,241,216,218,224,208,161,269,210,181,191,229,208,177,248,185,211,206,221,146,193,217,160,164,236,131,218,225,257,190,228,207,214,192,194,226,183,220,183,142,205,168,255,223,235,190,184,188,210,214,220,224,251,193,235,204,225,216,212,228,290,196,211,195,283,157,246,168,242,191,208,209,220,149,207,216,222,226,211,282,193,181,228,202,241,232,222,201,183,191,209,232,207,251,215,217,234,178,245,219,183,234,205,170,182,179,213,221,271,182,203,226,178,204,185,253,217,235,181,193,228,209,243,168,225,190,245,211,193,235,242,223,215,243,253,172,247,189,251,268,257,211,262,225,240,186,243,200,200,179,243,215,251,205,202,197,256,185,199,189,241,218,184,146,185,216,260,229,169,242,252,256,194,147,220,220,188,237,191,236,237,221,208,179,195,265,215,221,207,230,221,211,208,231,218,198,235,271,204,194,201,202,178,234,188,235,198,230,160,202,213,194,180,167,173,208,166,180,167,193,183,210,224,268,158,160,189,212,211,228,188,156,191,178,242,210,186,254,187,217,183,232,215,175,215,231,239,213,221,235,238,230,229,165,194,170,207,226,206,229,282,192,224,197,205,204,199,219,243,189,197,165,205,184,226,154,223,225,256,197,253,249,190,201,240,225,220,206,152,218,264,203,236,188,235,264,201,224,253,210,182,208,248,196,223,199,203,203,231,222,166,198,143,193,199,255,179,231,173,217,214,297,194,215,191,233,234,179,210,186,216,160,203,250,160,226,196,210,205,264,183,278,217,227,269,223,272,177,200,233,225,185,223,232,255,226,197,234,206,191,226,215,228,176,181,185,229,226,187,187,214,180,171,228,167,245,213,208,217,163,204,220,180,203,209,241,199,254,198,191,191,164,185,173,226,174,212,176,222,238,187,217,204,221,214,203,190,196,214,196,214,191,219,195,167,215,189,212,213,195,206,228,274,210,197,216,226,208,188,170,215,181,175,203,199,213,186,193,246,205,234,230,250,156,186,191,195,230,218,220,210,213,191,219,201,149,198,201,206,208,211,218,214,212,236,180,209,218,195,183,210,225,189,185,216,179,201,187,227,209,169,168,183,217,224,260,244,208,158,239,195,233,204,154,134,166,199,221,203,191,266,175,176,234,243,231,262,174,157,219,207,219,213,204,197,209,243,200,191,209,223,181,196,163,165,188,235,164,217,229,171,179,304,249,230,169,239,250,136,201,247,173,164,207,194,248,201,199,182,212,204,199,243,260,215,142,173,184,257,228,257,289,228,209,191,254,175,210,248,215,257,262,234,203,164,252,231,203,184,203,209,222,256,243,188,169,250,257,225,203,239,200,257,149,239,177,190,189,185,145,233,155,237,224,179,240,244,164,226,254,216,175,228,228,143,250,212,157,214,189,193,223,189,221,184,227,165,265,204,185,195,213,199,207,243,241,262,208,163,231,200,223,243,186,191,241,251,211,206,241,254,184,184,221,243,277,244,212,244,
263,264,216,236,249,205,222,206,204,225,180,182,179,246,261,196,202,240,194,236,214,180,208,205,218,210,184,181,205,204,171,223,222,210,202,203,218,286,215,198,207,184,190,173,273,213,210,230,204,234,224,184,233,166,214,215,183,170,242,179,180,158,179,173,254,196,208,191,219,180,167,214,190,184,195,213,212,206,191,143,186,229,232,239,255,210,189,212,199,207,241,192,191,228,250,249,179,215,182,217,231,170,268,245,238,200,199,206,254,196,242,231,229,226,224,219,202,145,198,247,191,173,194,238,248,238,227,220,186,242,231,250,237,206,175,182,179,195,187,180,179,147,243,170,192,232,197,214,196,223,184,232,253,196,223,208,256,287,209,230,195,212,179,252,161,148,265,179,243,146,249,170,233,241,132,234,196,222,199,241,189,197,191,209,212,172,164,191,226,213,237,257,215,167,158,194,208,242,208,158,262,237,179,157,233,183,218,206,190,187,217,225,172,199,205,178,202,178,221,212,221,218,210,192,223,210,266,204,145,183,205,201,212,188,220,205,262,213,162,214,189,263,195,215,238,195,261,147,210,227,207,254,223,198,267,263,233,194,182,221,283,201,229,215,233,196,221,222,135,216,158,197,178,159,232,192,173,267,205,199,208,206,197,164,214,188,191,244,191,208,237,244,276,240,279,183,256,154,172,223,199,216,172,235,233,219,212,205,201,210,249,216,209,257,185,187,237\nTCV_PM,avg_node_weight_complete,240.08,25.452850528,281,218,241,246,272,211,263,246,254,262,243,247,277,254,238,245,262,218,257,219,258,246,234,268,257,253,239,249,269,240,208,279,237,214,232,261,219,196,274,234,225,245,260,204,218,245,196,193,267,166,247,261,274,221,259,245,247,220,233,256,238,256,222,174,220,210,285,265,270,229,218,246,250,241,248,251,277,226,270,209,245,250,214,263,307,227,250,212,284,196,272,218,255,224,242,252,247,202,231,254,244,259,248,292,241,211,257,237,287,244,266,235,218,223,255,261,245,264,256,229,239,208,269,252,216,262,241,221,216,210,247,271,259,212,240,257,201,228,230,284,246,277,232,240,267,252,270,213,255,236,286,236,255,277,251,228,246,257,258,202,299,220,264,294,291,252,274,241,267,211,260,222,238,217,261,242,281,239,242,230,260,210,234,223,262,257,228,173,206,256,265,250,200,256,289,287,230,183,239,239,224,257,228,264,246,253,243,210,225,279,245,259,265,248,235,246,245,281,227,217,247,291,222,220,235,217,230,259,225,263,240,254,201,240,236,236,236,195,226,229,222,225,199,215,199,247,244,277,194,199,233,237,236,256,234,218,223,213,266,247,234,284,223,246,217,254,263,196,237,257,261,235,250,210,262,263,280,182,233,206,242,240,240,226,319,241,268,226,237,246,217,256,261,221,237,215,221,217,261,213,264,259,274,239,259,280,219,232,252,238,251,235,186,252,267,240,272,204,272,293,242,249,281,224,222,225,265,228,227,241,230,240,273,263,192,234,187,218,227,268,211,262,224,256,264,299,205,263,223,246,245,215,243,221,245,198,239,266,210,269,236,250,235,281,213,285,264,275,292,251,269,206,230,262,244,206,250,249,264,246,246,257,243,225,264,253,257,210,211,204,240,251,221,211,270,215,201,267,177,268,242,226,238,195,214,254,218,231,244,269,230,260,230,232,200,184,226,209,246,195,248,217,259,264,223,263,247,226,246,223,217,226,234,242,261,232,227,236,202,253,223,268,252,234,227,248,303,222,248,238,259,226,228,212,262,227,225,224,237,241,213,224,278,248,234,236,265,199,225,220,232,274,276,261,238,228,227,245,227,183,240,241,248,239,243,235,263,233,255,227,237,260,235,219,230,254,231,230,243,213,227,210,243,251,216,197,234,248,270,252,226,228,200,254,227,252,244,178,172,207,241,237,225,241,278,204,224,274,260,263,288,201,204,254,221,262,243,221,228,240,285,247,231,256,263,212,216,204,215,218,254,186,245,253,211,226,283,282,25
2,186,238,280,184,244,254,213,199,223,208,264,233,214,217,248,231,238,261,282,225,184,208,228,292,260,290,293,255,252,227,261,223,238,278,255,267,263,251,230,198,280,260,222,231,227,230,245,270,262,204,206,269,260,232,224,257,225,286,191,273,221,234,241,218,199,261,207,257,252,205,276,282,199,248,261,235,216,258,263,186,265,257,198,256,217,227,247,240,251,218,250,199,258,226,226,222,242,252,228,265,279,301,250,211,243,219,231,289,220,234,268,275,248,241,263,258,219,219,243,271,276,295,257,248,283,302,273,268,274,239,246,256,253,243,228,218,221,278,280,236,251,261,221,268,235,223,243,257,243,246,229,202,245,239,217,257,245,235,244,244,245,276,251,220,214,230,220,198,303,270,245,260,235,259,251,228,237,199,252,246,197,201,255,233,232,193,213,216,268,226,247,235,239,207,213,241,228,225,229,264,246,219,206,185,223,264,247,285,292,238,230,250,232,238,258,200,226,260,266,274,213,242,227,236,273,200,270,258,268,243,223,262,277,242,266,258,242,254,244,264,228,181,217,267,236,218,231,270,269,269,253,280,216,251,261,284,252,233,211,200,211,220,220,220,204,188,277,212,218,253,229,246,237,245,227,247,290,218,251,251,285,287,222,256,215,249,228,289,197,184,278,203,261,179,280,195,254,264,175,250,234,273,209,284,226,229,225,242,236,219,199,227,257,253,244,281,245,201,191,225,227,270,247,208,275,251,206,206,261,214,259,248,236,222,241,243,207,239,236,205,234,224,256,264,253,237,231,222,222,235,277,252,199,212,241,239,243,208,255,211,287,256,200,235,216,299,235,252,259,202,266,184,222,260,229,285,244,242,282,289,250,231,222,262,306,229,268,244,243,234,265,246,189,248,195,218,199,199,265,223,211,272,233,224,233,244,224,196,247,233,237,271,213,229,276,272,290,267,294,202,256,190,209,249,240,242,210,255,252,220,249,238,222,236,256,241,239,265,207,215,251\nLONELINESS,avg_node_weight_complete,0.854197119417,0.00339693899678,0.849124962931,0.854470843728,0.851034963757,0.855217128406,0.851072390994,0.849824094859,0.851600114536,0.858232997796,0.856522556403,0.847813835299,0.851773551966,0.853774190941,0.851655101254,0.855318601328,0.854630544487,0.857042329686,0.852759307316,0.843211201545,0.854284291436,0.856629735986,0.8501457599,0.847477184696,0.850751705682,0.850548757988,0.856958753532,0.854003927774,0.848583595981,0.847519204067,0.84606064249,0.857417209266,0.855444896361,0.857921723967,0.85211021703,0.862566067607,0.854039951332,0.851303288623,0.855409313687,0.849927626777,0.849753327175,0.854488629702,0.850930457727,0.852507612724,0.85314809736,0.855240509749,0.852194431117,0.854686968287,0.859071331609,0.857629519862,0.849109145254,0.85858275496,0.852884930232,0.852752307062,0.853443331287,0.854407193327,0.854305477369,0.855782376605,0.847393687201,0.856673247564,0.85448757259,0.851735288335,0.854781206105,0.850702627441,0.852030943396,0.857356816195,0.856938696626,0.852820763136,0.851893577387,0.845019383787,0.849906604079,0.852902985656,0.847673634487,0.847045194833,0.855449516789,0.852505394673,0.852990568452,0.851887596508,0.850255319033,0.854946874689,0.849398669271,0.853827696417,0.854621361907,0.855634905691,0.852273457074,0.853295544529,0.845893375097,0.85110963702,0.856995345794,0.855430419884,0.850326804916,0.856146296452,0.854782013609,0.851806868738,0.857079139315,0.854490150457,0.857355155577,0.85818300567,0.860590792944,0.855190499577,0.851284573615,0.852206178309,0.857229509268,0.862342114735,0.853366812702,0.855845774965,0.84508468688,0.854877712887,0.856627219984,0.853822164025,0.855817808212,0.860188536578,0.856635247152,0.853183583298,0.85431042518,0.8622047065,0.851989796351,0.856167
151406,0.852741054558,0.852263119813,0.851259981635,0.856133112767,0.855401869981,0.86172351243,0.854915237588,0.849947825724,0.852607790278,0.85962829646,0.8536061026,0.859610176409,0.853694519789,0.857562276181,0.856046314649,0.853548710695,0.851491422855,0.857963021675,0.856107337927,0.854853293017,0.853771223266,0.85814014954,0.862880010141,0.854366301056,0.851728176861,0.856464924978,0.861805135585,0.854910147196,0.850520916368,0.854067033386,0.851532789527,0.854632376213,0.8535581644,0.84887107555,0.853234020789,0.858147718134,0.850156099069,0.855917457184,0.848998656906,0.859013698277,0.849611105459,0.849592230694,0.852216582456,0.859793173927,0.852581587429,0.853821036109,0.8554974539,0.849911432147,0.846093436713,0.852294763754,0.848948619046,0.854637599264,0.852133717124,0.856878358601,0.857235680914,0.859116644185,0.854388764969,0.859356308848,0.851657673688,0.858565792455,0.851989339211,0.849959045702,0.855301628849,0.857806966416,0.848450681448,0.864561690859,0.852474284186,0.855556076248,0.857456362773,0.853676348501,0.854019200287,0.864230777199,0.85910750453,0.853377635346,0.852279538381,0.855733886802,0.862606521703,0.86091045927,0.853271958806,0.850866983871,0.857514969991,0.855540262248,0.848097989454,0.85287339044,0.85317129781,0.855402519752,0.849704036205,0.855522788485,0.850174910618,0.852345828391,0.856533567448,0.857823039231,0.852665752321,0.853735099548,0.852611444812,0.853426599027,0.855930524861,0.852884980915,0.856390844389,0.85706772805,0.851636895184,0.850745833395,0.857560152613,0.861231881992,0.85441646484,0.851874384949,0.85047446694,0.859945612976,0.855913792171,0.854378244196,0.858048850662,0.846698915664,0.849583179075,0.849785116397,0.855898914846,0.856349313075,0.859250732928,0.855381942442,0.862223664562,0.855992965876,0.850684631533,0.86411984417,0.854072544892,0.852304077965,0.853661118085,0.854879201274,0.855013690146,0.854998611826,0.858289415931,0.858721863011,0.855114033932,0.858066677525,0.857296606427,0.857335315164,0.853646392716,0.857953525441,0.851701648849,0.850740380907,0.858403985262,0.853321298492,0.856297844754,0.858711703821,0.848983762827,0.851721520271,0.85200411304,0.851808444899,0.856687892374,0.856304237827,0.856631966574,0.853910594172,0.850790100739,0.859066491298,0.854223394238,0.854741897864,0.852576516952,0.859022908692,0.857890263148,0.858035402312,0.845701432209,0.855378785737,0.850945129772,0.854514511771,0.856458469771,0.856545979822,0.853425497805,0.850147624136,0.85145684005,0.85136096452,0.852778771839,0.854226365739,0.852387366295,0.84790491143,0.854165398488,0.853972129355,0.849924329723,0.851532722636,0.858154478913,0.858468925312,0.85809035494,0.858774763134,0.856354138117,0.85399916334,0.855388333635,0.853508527036,0.853691267053,0.853338608233,0.850687646128,0.853699884114,0.854786243919,0.850379072881,0.854779801928,0.859378705295,0.859510657275,0.852107847022,0.857704122885,0.862370215043,0.855952010948,0.85194472258,0.854655294821,0.848806659025,0.857607228116,0.853081765644,0.854771592534,0.855953601748,0.849701354775,0.850535904645,0.853291101322,0.855819330278,0.854244746876,0.849907817141,0.855834911254,0.855752290124,0.855533212457,0.859721777687,0.857311589479,0.848509221672,0.848950473415,0.854297623208,0.851650075583,0.858231718967,0.855850336483,0.858029393731,0.854033570785,0.853031548666,0.857972618516,0.845447477085,0.855463678129,0.850760444555,0.853548641033,0.853062440808,0.858586246213,0.857866299336,0.856208839691,0.851675707637,0.860154774745,0.856264793957,0.858110750871,0.851712748759,0.8
59306759686,0.856016803932,0.857417676919,0.855011497958,0.859024955744,0.852733458859,0.850744681212,0.849922764403,0.854586514224,0.852155699074,0.864450561514,0.85336458009,0.856018938091,0.849446777923,0.853702086868,0.847500031325,0.853058123886,0.857444306913,0.863354371559,0.855455044798,0.86146979572,0.856427699541,0.851150646354,0.855938664519,0.845584721527,0.852924326147,0.854444004369,0.850487509393,0.852278884745,0.854520080275,0.848673167806,0.849670219187,0.850509340864,0.860719454933,0.858156865911,0.856627870002,0.858852557373,0.854732752136,0.858320745302,0.853540843144,0.848810000971,0.857120821086,0.85986711654,0.859886610251,0.856392347408,0.851157129728,0.852340570998,0.854094420961,0.853030029472,0.855617182556,0.851887832344,0.853375122028,0.856192717588,0.851590342464,0.858055168713,0.850939297088,0.856984255887,0.852752080506,0.855051969038,0.853735901635,0.856818729576,0.862473932186,0.854638785919,0.853106335224,0.857546219192,0.85013660444,0.85355349557,0.853620375267,0.857203552894,0.85224618666,0.856131011605,0.856155395243,0.853877916364,0.854079672588,0.851741704133,0.86061743797,0.852481609008,0.852813963098,0.855250148016,0.85094535547,0.855810983909,0.854852694723,0.851951299844,0.85255529272,0.859142795489,0.848371394588,0.853781543859,0.852024402372,0.852760080386,0.849404784994,0.856198494884,0.849705881378,0.85166896379,0.854994677092,0.855538934124,0.852913886682,0.857749507998,0.854051306252,0.851694630587,0.859806954869,0.854367985936,0.858478452369,0.855702995852,0.858387382665,0.854857285629,0.851888449256,0.851848231499,0.852208013834,0.843903316329,0.851628708958,0.854111143536,0.854780049288,0.853595836271,0.854214319115,0.858592674567,0.853842508174,0.855062389616,0.85221384839,0.852201259196,0.850771319096,0.852688362946,0.855958904899,0.851652229647,0.85488133856,0.856713598685,0.852921396387,0.853923053388,0.852229190431,0.856594608437,0.856923259949,0.854249704993,0.856406367799,0.853373060305,0.851312064129,0.853376051404,0.856634686516,0.852909596173,0.855917267474,0.854592410178,0.853375837574,0.850471029854,0.853191644562,0.857799032787,0.858462461714,0.852368882809,0.856533527772,0.857174436946,0.856600274687,0.853206687984,0.855631561562,0.853171027497,0.855932761897,0.858080596988,0.852168453494,0.85498374339,0.85255074587,0.848125195146,0.855795862632,0.857178517325,0.848173539446,0.856578507606,0.856406982149,0.856317937466,0.857820148398,0.856261331949,0.857943134548,0.85456165743,0.855982578836,0.849957707853,0.859315215822,0.849086647853,0.846621225976,0.85615923624,0.851975894315,0.860368515884,0.851852031365,0.853600922399,0.850151331173,0.854858609531,0.854395351617,0.856572665257,0.856916806568,0.855305192366,0.859761517903,0.857434960133,0.855514116877,0.856635730553,0.853693873526,0.856113485556,0.853439650091,0.85021331013,0.856074158201,0.858555485916,0.857172829358,0.853946733909,0.852767969414,0.852291173107,0.854015487526,0.854764262554,0.856374450407,0.855156335284,0.854607475387,0.851252171514,0.851322477269,0.85463488328,0.855841508632,0.857759340874,0.844916459728,0.858198686653,0.856829632221,0.850831465583,0.852907532195,0.855553090941,0.856688442158,0.851979474279,0.8468057962,0.851990003951,0.850052195642,0.858688735406,0.852565048976,0.856380296485,0.849882469253,0.850640080415,0.851856513344,0.852084322696,0.858322045531,0.853875602224,0.857159834524,0.85001052466,0.853177862967,0.853912827195,0.857726837121,0.851208811676,0.85682420677,0.856647427414,0.855139937077,0.861451439901,0.855234235474,0.85652105
3344,0.850926154293,0.853263647186,0.85167221933,0.849219987807,0.85758686094,0.850685505118,0.851748526014,0.857321659539,0.853138423439,0.851417814138,0.854895682593,0.854623195138,0.855846384121,0.851424001643,0.856347068599,0.850905166782,0.854222931777,0.851851303228,0.854111218965,0.855511385927,0.854771515772,0.848079770707,0.85726472654,0.849613571223,0.858668968154,0.85169888445,0.85317356909,0.854197293054,0.854291357694,0.855790689795,0.854302829913,0.85628264537,0.85738366643,0.85148775931,0.855343352044,0.857549471855,0.844408610394,0.853518707464,0.85098104708,0.8544464111,0.854713756393,0.85287047073,0.855520982453,0.852974611714,0.859691820396,0.853486816597,0.851792170334,0.850900085467,0.863756374947,0.854804404836,0.853603002417,0.852843867279,0.854342182696,0.850910326296,0.851866405841,0.857137488852,0.854607369403,0.8543359995,0.852889892291,0.855725406563,0.856730365497,0.855400084003,0.848022648937,0.856115335208,0.859089580297,0.849407589009,0.853966255156,0.848066716917,0.857931356989,0.853840619068,0.85291463269,0.859949718501,0.855149989501,0.850847148144,0.851929511837,0.858491091331,0.853367442925,0.850537128806,0.855620445616,0.852651722667,0.856681127036,0.854204104097,0.85306522673,0.85788046605,0.856087541306,0.85076683922,0.848697928616,0.846224294897,0.85441025811,0.853483591406,0.856431781532,0.850848719819,0.847014525867,0.85564622799,0.854989166888,0.85331940126,0.85159476228,0.86085506218,0.851739221032,0.85348775602,0.855430497776,0.855389812622,0.855227066115,0.849957445885,0.854181880844,0.852927930258,0.854382390793,0.851531333863,0.855857246009,0.84908017584,0.854700647907,0.856537286778,0.853115830716,0.855981773176,0.85686266224,0.858105098973,0.853054515885,0.854425459616,0.855689432858,0.858795719598,0.857074089161,0.856210231733,0.856455901012,0.849721511137,0.856039835902,0.858207607885,0.856492437149,0.855301165728,0.849796074895,0.857079601427,0.852857556419,0.852183192243,0.852594150431,0.861113223127,0.852380681842,0.850829754402,0.8567785287,0.854316223046,0.857466723741,0.85489430579,0.856496029167,0.855528928164,0.855959046275,0.852729108773,0.859739116261,0.845285611427,0.856816279173,0.855338098748,0.85777993098,0.847119815425,0.858577391802,0.856097513036,0.852982658537,0.851964290992,0.849990879965,0.855693144749,0.848963749856,0.855751843651,0.860878874843,0.8541790394,0.854193595233,0.85317525233,0.852506552658,0.851221301493,0.851042162808,0.85302038923,0.856578631446,0.854795050419,0.857011058055,0.858324301752,0.851179623327,0.854897676676,0.856300049245,0.84512134638,0.848324674266,0.854301300764,0.855581405491,0.854604467518,0.853609991151,0.854448270425,0.84879218299,0.85433987423,0.855521715167,0.855942994176,0.850226371017,0.849991689282,0.854160135375,0.860359932476,0.855974023825,0.854899661598,0.852382214931,0.855024682929,0.853491862545,0.850309380964,0.852381200131,0.85539184733,0.85287842811,0.850709470524,0.858136651277,0.856012297339,0.852916238286,0.849722148831,0.852513677099,0.849456351625,0.854250947524,0.849603918113,0.850387919577,0.853680819029,0.85743585921,0.849973834256,0.853257023081,0.850460134501,0.857134565981,0.855784263325,0.856448993143,0.857068504396,0.85893814717,0.850211237603,0.857137860236,0.854113221779,0.856233359398,0.842350059249,0.854946321199,0.857963196064,0.857886108756,0.858022583923,0.856287966604,0.854058398639,0.860554272973,0.852750280041,0.851623509848,0.859883904694,0.849592390946,0.856321576416,0.851028118896,0.851149326447,0.853106960468,0.849094135266,0.854608177317,0.8531
60981253,0.852800414923,0.854650898614,0.853662242032,0.850824858728,0.850510198653,0.849163426565,0.85215492769,0.857455894145,0.857169242814,0.852706632963,0.855501482142,0.852968208987,0.852196501868,0.854335805758,0.854138989465,0.859657478392,0.850043642333,0.853900028484,0.854496113347,0.855945567586,0.849600448407,0.857769494235,0.854819199944,0.853623757281,0.858521260442,0.855601361537,0.856776375572,0.851083803364,0.854972359145,0.8502272696,0.853743106435,0.856199710348,0.855815526248,0.856736725416,0.851087442336,0.856038432277,0.851955401972,0.851807871009,0.853599244666,0.848668167902,0.851898445254,0.852284623558,0.855532853594,0.854838690741,0.852071062571,0.855634468095,0.851394154107,0.852300359105,0.847525582572,0.856594901018,0.853171104347,0.849909170877,0.858535630076,0.859174793098,0.858472368913,0.861165555685,0.852272501308,0.853200726575,0.852838010254,0.858394813332,0.854936879418,0.854597166954,0.857335473366,0.85684250137,0.852563062152,0.86015792308,0.853415616228,0.852447581764,0.854341807121,0.851209139903,0.853379369702,0.85377434248,0.856834650243,0.852366350455,0.854905711612,0.858742218,0.854498388481,0.858677281316,0.859759319907,0.849946434671,0.852950990788,0.854027495381,0.856062799178,0.858193327717,0.849929553842,0.857219494159,0.849816992057,0.851397530355,0.852362990677,0.854286803293,0.852575396839,0.856787761028,0.854219923836,0.858932881919,0.851738310508,0.854130649332,0.854698237631,0.855975130967,0.849751841233,0.85578540666,0.853322953726,0.853805696944,0.856519455495,0.850275254371,0.857128735007,0.855385398517,0.85172990334,0.852652561904,0.854438641262,0.855160748397,0.852004108255,0.856632018147,0.852769200288,0.85886940586,0.850108915763,0.852133658653,0.848346357157,0.847772793076,0.859211003745,0.859229890509,0.856380725366,0.849128741276,0.854453154129,0.858858620817,0.851614003569,0.852943874502,0.855101185496,0.855684736979,0.858831127472,0.854691554429,0.857752569558,0.857845633007,0.85478327709,0.862496997674,0.858590133939,0.862505017787,0.856334489008,0.854674942834,0.850957160035,0.851228983827,0.845278642307,0.854885542789,0.851872107232,0.849089304969,0.846713439999,0.860900643585,0.853665379071,0.859033712222,0.856927626825,0.855531629124,0.854072630321,0.854616120471,0.857177458534,0.847917091622,0.853374449407,0.85662718998,0.853311685099,0.854702105887,0.852486660051,0.852514543508,0.85114893698,0.855644667713,0.848848372474,0.854230642381,0.845922303156,0.855159550546,0.85737376128\nQDS_PM,avg_node_weight_complete,0.377521056248,0.0131247826752,0.348668272266848,0.3639767531330125,0.3686436654997174,0.3438112950239763,0.38237227518153966,0.35755599505170926,0.3785695922524195,0.3952586049407947,0.37745885241321514,0.372347357319004,0.37977003642232865,0.3929024378614653,0.3665805802779816,0.3755483056753947,0.38367642400518753,0.3855658854085192,0.37215136279628536,0.37705641175172233,0.379815093224857,0.37096159985181865,0.378333862260027,0.39003950144383515,0.38063747642012563,0.37208813281610353,0.37883610902092785,0.3765322829352424,0.37872432966249286,0.3631474995898145,0.40606021547723425,0.38842815481218446,0.37307850444478313,0.3903095056083965,0.3619431593417639,0.38268990007515136,0.3642673699483826,0.3619240368448536,0.40178150032157034,0.37133297091398515,0.35452737834483405,0.3940681362836834,0.36183304581995085,0.40356046747051655,0.38444141602115606,0.40250740175348193,0.3693955393471497,0.33362513477095546,0.3764074802672651,0.37975398778497205,0.3645129864497599,0.3600974890725804,0.36932751852541085,0
.3641637455675269,0.3619343610753064,0.3932780918141981,0.37379956167152395,0.3764036298989831,0.3886929261087466,0.3881872036613993,0.3585261198192286,0.36731371786323475,0.366448657603491,0.3892646220744963,0.3747144603384071,0.358623372150697,0.38765956611448704,0.3465404672717622,0.383991316031806,0.35369534468120895,0.39456772763433284,0.368969314984233,0.3678168942061483,0.37048319459967616,0.35681679007910816,0.37970605171163807,0.3810408725400304,0.3722905243643562,0.37454933906659726,0.39048385828784554,0.3582965651840347,0.36503233065017826,0.38168367621125365,0.3803086803284629,0.3634434314569538,0.3979094613467937,0.364590616461012,0.3567134528582394,0.3826364734697191,0.38374883449870695,0.40149064110827615,0.38127317601073946,0.3915300984969977,0.38060184736931546,0.3867457975508471,0.4043038961324902,0.35866921510036887,0.3761610664438169,0.3942274529898072,0.36529695215914165,0.39542332753155884,0.37759521283677044,0.389584454762887,0.35936804808749073,0.3589630492049928,0.3985300369315484,0.3789618317881594,0.40572991069246406,0.3653411292258581,0.3589758414897083,0.3837621520563936,0.3810265352670948,0.36124699480029837,0.3584749165725019,0.38247660120193655,0.3684686408668218,0.39907923066061407,0.38388381637655117,0.36302723037941514,0.3781475767864816,0.3858179969724095,0.38003040604780874,0.3807803461892464,0.3642748127224318,0.3617007165328191,0.36220133318865394,0.3597774617673809,0.377072614611767,0.40104677743348793,0.3809455331630832,0.3968805252492964,0.3720663304099743,0.37955263625256147,0.368576843038363,0.3810635179078369,0.3974203871209962,0.37149535115596954,0.3847451992800748,0.35676783957155356,0.3800998419974866,0.3829907137074574,0.38306793559653063,0.3874070408463426,0.3810254908180062,0.3840436047310416,0.3826095218073957,0.3706277636197374,0.36494759783060776,0.38873126565192057,0.3907181981511725,0.359351985523764,0.3686534064750691,0.37236107709876776,0.3892937612107549,0.37656495311644667,0.3747323808440926,0.38780933989898886,0.3826672002105711,0.3709372021253097,0.36863293031327654,0.3764173097005364,0.3755183338712294,0.40128129872353685,0.3615157085329348,0.3948329514912292,0.3825568878168474,0.39250656565942726,0.37419438836535096,0.38086519063996277,0.39648045271946764,0.3772982797194074,0.3723070103947576,0.39673881831908603,0.39288924499852046,0.3857080156706519,0.3656574262885817,0.36232942350641567,0.3601793317255828,0.37755074402656236,0.3862454715356128,0.38649860879150205,0.3437770624392475,0.3673461250508822,0.36433088325601487,0.3826449810303113,0.36153335266404524,0.38002583169194415,0.368240234372638,0.36418824317344595,0.36848358825112904,0.3742161259365098,0.36255152636461546,0.38663016760871666,0.37351824229122277,0.3781803008038354,0.3473922364312913,0.3933425410362963,0.37038879983656386,0.39029297811443153,0.3664115153938401,0.38134998924606667,0.3854228541164075,0.38730824447084294,0.37807541124876065,0.3800332954001341,0.3814913679480489,0.36280297328526645,0.3783361321704254,0.3627661311897192,0.3770953991943013,0.37138001565134826,0.3885782151223724,0.3588017409061065,0.3994701574001839,0.3806744296402782,0.37539889294804685,0.39007954341526613,0.3837588381634556,0.3775321114518033,0.3662227615599547,0.3668358455725795,0.36609496947570874,0.3604684302207137,0.37345261557380505,0.37868228270879845,0.3914297895196392,0.36783431474585504,0.3985759307974398,0.3880291485247394,0.35234724055000527,0.39427093282734105,0.37202294109849854,0.3811998770445585,0.37096717675508534,0.3807180744329956,0.3750111582057759,0.388032415210
9263,0.3915484757665625,0.3672265716496797,0.3815552424121691,0.3686270496361972,0.3707993789945033,0.3783846844107669,0.39659341710813,0.38686012123244334,0.3689650806496726,0.373915756999769,0.3814964553780458,0.3880759702780102,0.36951502222706484,0.3934949157880129,0.3584102276157304,0.38593640247944366,0.37125424319066497,0.3729912326704738,0.36178736145073614,0.37451448862502373,0.38315520072869147,0.3541162235045243,0.39359001166381963,0.3933447394518463,0.3725604814758698,0.34662711400820717,0.39165389328358396,0.3804933294770028,0.3764427002292461,0.4079462487684427,0.3687176293321866,0.3745051822395102,0.3715983357435034,0.3786252116804071,0.3931896848897982,0.35472861208417256,0.3617679276687018,0.39007837597906025,0.35843694684582833,0.37303147082621063,0.38486713472664646,0.4000989367023577,0.37159294733301274,0.3852497361892141,0.3550273808158713,0.3888802738827546,0.3942960248009037,0.38391384426493147,0.3745617343781117,0.37890192134330997,0.3685118473770596,0.37304350882388587,0.3905386431129882,0.372168705382189,0.3647969816802098,0.3653505703100943,0.377405007711994,0.3825045774539076,0.37135768182895,0.3580017148822021,0.39830872763094133,0.39392809667855455,0.37138737796233806,0.3947415093544264,0.3982287061212907,0.3718498335602428,0.3771567835463146,0.38967786149577344,0.39215820203351653,0.3746178282278133,0.37972700771406037,0.38590768273599957,0.40309835509790326,0.37546601925809636,0.3498868280635793,0.3616224376466099,0.3744896277738184,0.3643634384321338,0.3842368829672627,0.38985407903433417,0.3610945861369271,0.37502981331933427,0.3690515974377993,0.39056737705261296,0.4100980568155448,0.37596519820799335,0.3788661818028586,0.38105872896348236,0.38770039762406444,0.39643401587684596,0.3859585134070478,0.37485327098903964,0.3723698912965066,0.3710252524871121,0.40776621929268747,0.3713510299572788,0.3632718276445148,0.3986133388078226,0.37228728606258,0.37922561929577375,0.38039349421682017,0.3732195244087377,0.3803300604442162,0.37937349361627976,0.39234997946493133,0.37637639815881335,0.3725490939485517,0.37346944994113296,0.3670956262472866,0.38815689809644754,0.4010164309683705,0.36101711542376297,0.4006682222816415,0.38201798152461625,0.3820714182120407,0.3841971690266837,0.3897697805635745,0.384820955823492,0.3619297821945092,0.38060594997116626,0.3777116927433066,0.3688042060501833,0.3778323044647798,0.38214360025657756,0.3850065586814031,0.3810146250497056,0.3412315185541396,0.40048454243455334,0.37913905806624554,0.36645568980420895,0.3909948595871045,0.3871196120622566,0.39026648687223575,0.37876142165398313,0.3828198070444193,0.3563558956609992,0.38019733635131897,0.36861718755098943,0.36472544912016985,0.4029253280213948,0.3837327089735145,0.3621070449486625,0.3845404315504664,0.40634134599658706,0.3828594344890267,0.3687055728241307,0.36395717832193136,0.38150507464419753,0.3702586589919467,0.38000975160662087,0.38368290180021525,0.37402824105485105,0.38420605648916223,0.37489971262290084,0.3881668193185414,0.39328828472693933,0.3825981092969435,0.37612958251279166,0.3806421215471969,0.38348322017476355,0.3745687542117232,0.3647174913424788,0.3772555948940721,0.3880488982449829,0.380880546198665,0.3622276764338293,0.3876824204625416,0.3696236216239601,0.37849987901859533,0.3820781529282821,0.3520227220010827,0.37995734878122006,0.35941332292809175,0.3695753168456358,0.3772528717029173,0.365651346873906,0.3697790919829092,0.35323574757802617,0.37621459190596107,0.37291932053406884,0.37300605912574913,0.37088262827457597,0.3828101194753599,0.39537416
49761933,0.3746732920183963,0.36296269533360714,0.34745981639881307,0.3884739578395104,0.36076570658533746,0.37063732366391283,0.3522158181633708,0.40561164886326456,0.3845667058125211,0.37533208235337573,0.4133282871705353,0.353759283290958,0.3704636589083061,0.3710721745821887,0.36825286969391346,0.3592083085108636,0.39253904923785543,0.3815203143362715,0.3819277007498898,0.39605106564296816,0.370043386750724,0.3791193050656236,0.3834866352729223,0.37113194043582487,0.37815264190874914,0.38825585581960986,0.3880274512891571,0.38300576168201395,0.39221999261432683,0.3789945719493345,0.3625770766293175,0.37905915016941627,0.36348972293220505,0.39648765077305687,0.3770426453216578,0.3934737200191863,0.37214227674177947,0.379431835622086,0.381528946016709,0.4076178494821462,0.38780215658825445,0.3693605877303023,0.39137621845813303,0.3483071884981478,0.40397511084817533,0.37591876351171555,0.357813031174992,0.3574166760224428,0.3693815141886458,0.3815957077965368,0.3728327278264273,0.38990551216518354,0.40440771077941157,0.3765945284252286,0.38315661380263255,0.40323627817803354,0.3664624576531606,0.3806682284792539,0.38288543904323225,0.3987687322636126,0.3760937726501047,0.4009246459234692,0.37718591141607427,0.3790902659649581,0.390425011344759,0.37566729893369916,0.40402972356374744,0.3929833296053707,0.35729326969902847,0.3837921727550851,0.394490921160805,0.396396535660072,0.36046581823820467,0.37924009810194137,0.3848080909381746,0.3615867346471942,0.3874594301407992,0.3930120016664968,0.3811743620324183,0.3640541737850245,0.37404748209792377,0.3750445431500776,0.4061030200015051,0.3949538650579614,0.3684282402271058,0.384700391026883,0.36649428392944844,0.3830790765577731,0.3806703741295282,0.36740123928068913,0.3666008023497084,0.3619387944942721,0.3757749591681347,0.37841162152828794,0.3664657167202127,0.40115777577887285,0.38788764295103806,0.38724781150472626,0.38951593559742564,0.3702034018747111,0.35966429714110904,0.37501918187814776,0.3626956387270965,0.3688566338621117,0.3577836004794128,0.37846105076948294,0.3705761929916488,0.3791089041643079,0.37047728122599766,0.3801441812618559,0.3953387874553515,0.36945953077284127,0.3954545522419028,0.37961198713011185,0.37469503726962305,0.38016923509684114,0.3788291430862742,0.3870623860177317,0.3750449783667619,0.3772935402650943,0.38191751391489737,0.3870014559859127,0.37928431545804353,0.3429336188228665,0.38539150021720037,0.37521137331047,0.36993352337423324,0.37578435789824155,0.40858271717186617,0.3647464943720817,0.3405719664257476,0.39705265251343713,0.37788160483027594,0.3787160865559323,0.3816612588285754,0.36873774024362777,0.36849438603271717,0.3663762663992848,0.3814198139039246,0.37009752634300497,0.37159978985346276,0.391686431013982,0.37517655905176617,0.3955330489133925,0.40038130059953,0.39149746815818137,0.4176072441172487,0.37346184839970736,0.3847948702729035,0.36678004371862233,0.3825124132644939,0.3846135702397889,0.3800360997165953,0.3830848467800355,0.3801364209097494,0.38360720459874414,0.3776774063665457,0.35743677169255056,0.36444618489428343,0.37101672819310694,0.37591490667202665,0.38503620089946466,0.37629090197754567,0.37778347395179857,0.3810066930922701,0.357205149755699,0.3863338669869268,0.37956441689755593,0.3868983211667546,0.3836728969448078,0.3714797235083837,0.3727461546175087,0.3953184322795542,0.3943377537413308,0.36120719462718254,0.3780798233542928,0.358725735607675,0.3535035276360087,0.36362833072541706,0.3837285659154218,0.37587982136150994,0.3768523306969999,0.3605312472713084,0.354890
49207527446,0.3667288051932287,0.36775953671755857,0.36375272812628406,0.3711179294640098,0.3894788929778749,0.37626764555117886,0.38968779159640926,0.4035891806453435,0.3845505332125376,0.35267374740785795,0.37461620475086255,0.3739512427505619,0.40493570252771743,0.36527864043980157,0.35566475222550326,0.37090709954246187,0.3603812091678792,0.37643519577768303,0.3689384194271831,0.378223914026032,0.37276052381415636,0.35285498737344995,0.36726970825498734,0.36034432208589795,0.40053072985220806,0.3684773445418776,0.37900201018132207,0.3757090638784712,0.37711039487607273,0.3684948025296461,0.3607530629832586,0.3624114851440172,0.36670109099181786,0.3977818448616472,0.3542144923807277,0.37841761533694146,0.36556092349437785,0.3616289254408248,0.37463101944720634,0.37722847755762473,0.37075435352821123,0.4112685230312809,0.3917848300957314,0.3709273019269982,0.37280031877675,0.35961633548254396,0.3492704164502436,0.3471176360571807,0.38731990097909647,0.37682360189263714,0.37633040288030917,0.3788644503405394,0.3545631987627813,0.3737008524561253,0.36090082125310824,0.3947413000938917,0.3754208647550795,0.3792951357231805,0.39514190538461,0.3879286483470873,0.38445236868627275,0.36349295660137304,0.3929000090501832,0.3539925393930687,0.39573729452922024,0.37581269719180405,0.3822028810529155,0.37069772504067966,0.3772788738859336,0.3959727999663732,0.3632918972978945,0.37981425909927036,0.37035733325854975,0.38855215993764136,0.3905180805124084,0.39329403669348334,0.37337677579278844,0.38953926943006123,0.36374278925649056,0.38851213626513437,0.39140243429834687,0.3820278835129359,0.3614261974132179,0.3706053228717991,0.3583925622467374,0.36270743145184886,0.3770095470629737,0.3832006383572732,0.3623497308538812,0.3846548962653882,0.3745124435744364,0.3671821084332471,0.39104667941712784,0.3888457640608176,0.4023979073290529,0.36849525882818296,0.35891964574995,0.34001453007371324,0.36418469528344344,0.382455432749166,0.37054636786412365,0.37412156226768184,0.39127061170496746,0.3939190936395587,0.3667255977385321,0.3661455143140559,0.380136785601461,0.3946695119427704,0.36739529070607285,0.3732398071144066,0.38801291170354735,0.39136538011970456,0.3537984744614031,0.3765117039131531,0.3682889372633153,0.3741814362436328,0.3844241591367813,0.38217153124524905,0.3827135365113253,0.3525274967793019,0.3732662066506366,0.3851148777648262,0.38177174005128817,0.3554957172731015,0.39415948472065865,0.3728940281145168,0.3682867698932282,0.3730362499483874,0.38517219037167005,0.39050915900241806,0.3925391592913974,0.3851933990811379,0.3827487223198678,0.3922933692373323,0.39151987031982255,0.3847000714770159,0.36382043698461425,0.389456403568905,0.36913830485092214,0.39127452565840687,0.3702187628474257,0.3651475196322268,0.35063125987400817,0.3776973258134561,0.3820386196208066,0.3834034576035528,0.4081016020788856,0.37134617249872576,0.384310508019156,0.36293808424997825,0.38792592361247535,0.36475169786312983,0.38418056921646576,0.3719844168223866,0.3875989718646425,0.39449101329628944,0.3729010380870976,0.36622719299418055,0.3501820326446278,0.36193714972131646,0.36663675532770357,0.38821806864654523,0.3725330507878748,0.3646363733411438,0.38420849155973635,0.37958928656577323,0.3562411742556855,0.3744328929711505,0.3682581879295143,0.3630761453528985,0.38870739603438476,0.3857824302063151,0.38039732107743746,0.38879451077217375,0.37596399959718574,0.3773802042154318,0.35883594011734893,0.3812890657526616,0.39781415655215,0.37404546567715663,0.3819809836506981,0.39095853732115826,0.3793719945699
7084,0.3698255165590299,0.38945468560393454,0.3954411247162443,0.39865373769974904,0.36735414751959694,0.39295994105339066,0.3571322037732479,0.3679447196645371,0.38685450988102127,0.3975623316003064,0.36868655170507625,0.37837412528706094,0.3653612500351443,0.38140998052756836,0.3839978810890912,0.3939167137127105,0.3539678451898253,0.3778488698496282,0.3987687639996439,0.35709442472487446,0.390023630387334,0.37370407782670634,0.37151906460333517,0.39025206898894427,0.33704713634259875,0.3744169769238306,0.38751968492975286,0.3625860153448088,0.39283891025926837,0.38558101578044685,0.36650189646511244,0.36618013878723193,0.37323117195726874,0.380928749787622,0.3823348318373102,0.3863413641926581,0.3630873014967129,0.3745663343159872,0.3835751275719625,0.3809501269395546,0.39681763306750795,0.3748209425807663,0.3813666028205898,0.3896253002002159,0.376502089781038,0.3855129929605258,0.38448460670130136,0.38336862994047893,0.3787003889117973,0.35705029155948215,0.38627177003830493,0.3889434575941549,0.3702806488726893,0.38342208229126895,0.36532309924154177,0.36982988460891997,0.39727451203515785,0.3911879754610852,0.3825168554747847,0.3521579972471326,0.3906289276490965,0.3795464376479596,0.3738902204620846,0.3727319503633478,0.38420822056033127,0.3644398610552482,0.384756336977891,0.3835970720986019,0.34620353429261025,0.3764579606409177,0.3926054520900317,0.3688863633431761,0.38997361915522416,0.3759602934182604,0.3994523611032865,0.3720453314679104,0.37463171921567984,0.393227009337283,0.36201784776791385,0.3714496627770859,0.36061055116618285,0.3713278777424488,0.3629171879144402,0.3634509204603313,0.39150194456491,0.37372137546385664,0.36418801480738366,0.372215831794524,0.36357991059946504,0.3940491731378943,0.3726648402683862,0.3753029840733266,0.3841270832447446,0.4038610181207154,0.3801679924202363,0.3780442399038352,0.36980764163263113,0.37890049419663574,0.38596160064871443,0.3813623893251397,0.39841177949727696,0.37082295184773406,0.394668297805924,0.379352154064206,0.3784752988599853,0.3626718236573607,0.3723718798969056,0.38605385607364645,0.3865724971462956,0.37219703752296573,0.3924455616903696,0.37774685895694354,0.38378654161813314,0.3719401049355938,0.4001567816565325,0.38220219210060147,0.3752936856408595,0.37017770242128994,0.4168685172080268,0.37876164157552294,0.3920491855849433,0.3800371091874239,0.3919774459566032,0.4035150434684761,0.3932885383440551,0.3759876941502637,0.36604529938280345,0.38426449428761317,0.3791885384882188,0.3884767568003099,0.37585763884862883,0.37260329192902547,0.40160285884320124,0.3791246215881888,0.3630429641034888,0.3865489284673497,0.390646847982161,0.37071170502467476,0.38668167275946047,0.35409269043576935,0.37230385559497275,0.37328003588937086,0.36848023432532084,0.3813330616318325,0.34856636348668013,0.36466745382773297,0.3920104864170494,0.39110540622333473,0.37390486969719405,0.34897587667212404,0.37355911325943525,0.37013991677394675,0.38118352814482115,0.37949343326765494,0.3730795330360375,0.3568973744958041,0.39021111423147015,0.3934661723362044,0.3888275428417685,0.3591456909145697,0.3753644235708482,0.3775182988706347,0.374704439765797,0.37524051028492,0.3900765639706408,0.3942428539364561,0.3627159270494972,0.37365853922208137,0.37448223589888596,0.36111034430849076,0.3781366870359256,0.38021163756092696,0.37815575573160987,0.3613106718701191,0.379794951880632,0.368380800016118,0.3590428522943684,0.37942326364850043,0.36870216698431096,0.40314541184380576,0.3720493293929388,0.3727635774678743,0.3996064005466644,0.380198474
42215465,0.3720619088011735,0.3529254312975984,0.38528753282039174,0.373178318113576,0.36946241881637126,0.3823584994769078,0.37507264887501324,0.37356702892116583,0.41857552179802765,0.4064653063374685,0.39560505639815774,0.39132042821066204,0.38629711413607926,0.3978152528586837,0.37071562048640194,0.38404687734012793,0.3662246300748402,0.3832070504950172,0.3883340594802202,0.3719895350272557,0.3777390861026597,0.3709210181446554,0.37353143713783354,0.39802399519545556,0.3775784133368053,0.3725929841382938,0.3767948074917607,0.37161731898212624,0.3614888031323891,0.3806542068847249\nCONDUCTANCE_PM,avg_node_weight_complete,0.141821474499,0.00938754971977,0.14614270684092975,0.15879295508772043,0.14881702905852415,0.16728182757146728,0.14500053708012967,0.14555446444496767,0.149102658227316,0.14461901756826007,0.15340603423546714,0.15000084178137923,0.14237752242071566,0.14681137420187995,0.14113930680354372,0.1431525653159383,0.1331209896506221,0.13458369549999655,0.136723362047532,0.12853962177936187,0.15420042647258708,0.146037671419195,0.14278587020551578,0.13911467309162115,0.1466952036878623,0.14976012507496275,0.1352016705701663,0.14371795833442733,0.13414056040399236,0.1313894901765858,0.12383810104660913,0.15532670274202842,0.14986798248066308,0.13857457535901063,0.1461947142509152,0.14233695454774173,0.13934273593982574,0.1480796666626536,0.1334901586567016,0.14095624243034002,0.16019177136652335,0.1430662212731621,0.14197984910437958,0.13124081678458177,0.13702843418557567,0.13817813302513535,0.14325279313906975,0.12819198868887702,0.14385800548097732,0.13172646623998202,0.15588224948930457,0.13719042515026247,0.15056530374424665,0.13492204616232,0.12942716267145205,0.11980799343366669,0.1460495962681005,0.15144886601390123,0.13350078468841148,0.14176091199943286,0.13727931672148616,0.13852710629259268,0.1312216286289194,0.13700050472957911,0.12114922192824586,0.1640395125338153,0.1459147679053397,0.14765371742627653,0.133148030787278,0.1283338784472948,0.13608725443679262,0.14579932582024685,0.13679680052372273,0.12407180032852141,0.12749978591087294,0.1408267085699856,0.13377767565215135,0.13898631725948046,0.13753072491612556,0.14385644408140263,0.13690388173138987,0.13647579030150972,0.13481920305117542,0.14426304822843422,0.12902615634673112,0.14730125679999628,0.14392041353122959,0.15109656683448763,0.1270755584895423,0.15117128576268832,0.14531250191316408,0.14068070614986355,0.12811475326394475,0.13803040266774666,0.1355092996679978,0.13757027917726264,0.1264331319972278,0.15802191040328128,0.13381191062113074,0.150616975164328,0.15571321328167065,0.15281341168618884,0.15674447909572012,0.13556130686635293,0.15428342646423426,0.14867773380107857,0.13551645065609252,0.13276964318644685,0.13557688612483473,0.1393780603641706,0.1474540657367529,0.14814063827132234,0.14755446729248853,0.1294328237032842,0.14145544660390452,0.15114205702478495,0.14649532525805958,0.13788086818035547,0.14529047543253554,0.13366044155033657,0.141505188711303,0.1504851825873781,0.14584662171458534,0.14313480186514133,0.13613848229577039,0.14142337144622766,0.1391480469954069,0.14572430093118294,0.14383027094853615,0.14873970899911854,0.15464613148926906,0.15114624357728618,0.13492153716412358,0.13232432443827283,0.12812734448724128,0.1403700298992084,0.14392244435181467,0.13738262262174344,0.1612274482780963,0.14300708605660253,0.14515325814120736,0.13486524808272463,0.15461671377258243,0.1535112204474309,0.15749241057690083,0.1389724786024368,0.1403346979734514,0.13673924481322783,0.137514302200
[output truncated: several hundred comma-separated per-run metric values omitted]
MAXPERM_PM,avg_node_weight_complete,0.405421133862,0.0165399236973, ... [remaining per-run values omitted]
Mode total_node_weight_complete
Mode total_node_weight_complete Iteration 0
[hundreds of repeated "Error Allocating Memory for PaToH" messages omitted; the same allocation error recurs through Iteration 50 and Iteration 100]
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode total_node_weight_complete Iteration 150\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode total_node_weight_complete Iteration 200\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode total_node_weight_complete Iteration 250\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for 
PaToH\nError Allocating Memory for PaToH (message repeated many times)\nMode total_node_weight_complete Iteration 300\nError Allocating Memory for PaToH (message repeated many times)\nMode total_node_weight_complete Iteration 350\nError Allocating Memory for PaToH (message repeated many times)\nMode total_node_weight_complete Iteration 400\nError Allocating Memory for PaToH (message repeated many times)\nMode total_node_weight_complete Iteration 450\nError Allocating Memory for PaToH (message repeated many times)\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode total_node_weight_complete Iteration 500\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode total_node_weight_complete Iteration 550\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode total_node_weight_complete Iteration 600\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Error Allocating Memory for PaToH
Mode total_node_weight_complete Iteration 650
Mode total_node_weight_complete Iteration 700
Mode total_node_weight_complete Iteration 750
Mode total_node_weight_complete Iteration 800
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode total_node_weight_complete Iteration 850\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for 
PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode total_node_weight_complete Iteration 900\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode total_node_weight_complete Iteration 950\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory 
[cell output condensed: the message "Error Allocating Memory for PaToH" was repeated many times and is elided]
EC_PM,total_node_weight_complete,6.409,33.4574015578,… (per-node values elided)
TCV_PM,total_node_weight_complete,7.986,41.5540347499,… (per-node values elided)
LONELINESS,total_node_weight_complete,0.0307748415493,0.159252501852,… (per-node values elided)
QDS_PM,total_node_weight_complete,0.0135271995492,0.0700301668077,… (per-node values elided)
CONDUCTANCE_PM,total_node_weight_complete,0.00516550699525,0.0267910227596,… (per-node values elided)
MAXPERM_PM,total_node_weight_complete,0.01466428255,0.0759284923389,… (per-node values elided)
Mode smallest_node_weight_complete: iterations 0 through 950 logged every 50 (progress lines elided)
EC_PM,smallest_node_weight_complete,375.836,43.8353408108,… (per-node values elided)
TCV_PM,smallest_node_weight_complete,356.667,34.9579191457,… (per-node values elided)
LONELINESS,smallest_node_weight_complete,0.846653448452,0.00364402036519,… (per-node values elided)
QDS_PM,smallest_node_weight_complete,0.383238700413,0.014096448679,… (per-node values elided, output truncated)
865,0.3805159629874071,0.36596289654063274,0.38477458020973926,0.3825124115723961,0.3706283937610247,0.3997481228607697,0.39080117787503194,0.3793120254271104,0.3405284766054157,0.3643209614251558,0.36015353530912275,0.36202585836787277,0.380883982605092,0.38848942801567454,0.4008948471599165,0.3869013061718438,0.38663243530067876,0.3929861478282388,0.36026985995846506,0.38424471396385806,0.38804831224521635,0.3629312761693431,0.39031210403410854,0.39149815886187683,0.3874207576860706,0.36966626245495215,0.38754374435194827,0.360865629184956,0.39935607955298214,0.3902686736029424,0.4023686584286164,0.37004321359440784,0.3827085364065351,0.40143935231973077,0.38631193558497706,0.38928249721019265,0.36574084515782235,0.38235025247303733,0.390183850309264,0.4056260405117896,0.36747921780808435,0.39979429163124874,0.37743799533046113,0.41159214442452585,0.3882978108970213,0.3750080971406283,0.3928158038987368,0.38788169952927753,0.3564712408620759,0.3656841844434208,0.38997947896299634,0.3823494345872627,0.3824227843317975,0.38117287436099323,0.3797956303458742,0.360658446942818,0.40215396802349807,0.3884068061135755,0.4161162643969056,0.38348802105385416,0.3783911877540022,0.37312700835251256,0.3808117630019477,0.38924795142397334,0.38519240328914617,0.3863450222253232,0.40085514955960894,0.4018419789173649,0.3822841321118469,0.3827478132198737,0.37969077263421447,0.40520667815920447,0.37233926897071057,0.3736380090225601,0.38880001066750086,0.38759600593958354,0.3795515536430711,0.3953760720627866,0.37837882906504106,0.3663318026285774,0.4003730305133428,0.367915294563276,0.3969478287662716,0.3619483872292097,0.38536181217263177,0.4000473380647382,0.3849582976035392,0.3789985955223194,0.39736146700161795,0.38176564712514643,0.36573258543246057,0.3806269474914929,0.38198719556118327,0.39439146302691463,0.385377855463911,0.3998359515025837,0.39307686747175163,0.38753815979162193,0.3970005409633982,0.3860170341098566,0.3774236015667717,0.38203737980063546,0.37152226475032124,0.3892932284436787,0.3971647361348358,0.3926496459128829,0.37819409513142505,0.3817065583009053,0.3949081462412834,0.38242003903060773,0.4056139954937792,0.3646558542803606,0.40955910412044844,0.3659699107919668,0.3748235635556941,0.38279374651009107,0.39164269229536053,0.38317064918014787,0.4074169542758882,0.4109154949703386,0.3745385881986254,0.38630862136844296,0.3477571308704731,0.3777225815109891,0.3874177694900667,0.3643002377481065,0.39347201844116786,0.36326790412361704,0.3916779689215875,0.3878469428349029,0.376292925665398,0.37475692640527336,0.38706743564256046,0.3773796551725337,0.409421956918401,0.4062561816793946,0.3780977792381149,0.3687151034398769,0.3731264709461824,0.39816559479739544,0.3799063737723736,0.38163069693146756,0.40312420625664125,0.3666661294974823,0.3942004483460703,0.40900179568118405,0.3827404952032655,0.3778152109736112,0.3972695653765512,0.40141433811354255,0.3774285967836877,0.3828279622189814,0.3898437786526296,0.3621303356604569,0.36276800304927526,0.3688049070634453,0.39916056107080045,0.3642968783750045,0.3737354181759478,0.37602933423093826,0.3694259888312779,0.3907168549475162,0.4111548005155073,0.36633669420502585,0.3667682220570158,0.4046323931442805,0.37821532176059564,0.391160256239285,0.39223906594320385,0.3729930781166868,0.3877658805284569,0.35257898580705604,0.38770040827027985,0.40569541924707986,0.36592945849209363,0.38403104051213977,0.3790107007013289,0.3774838911419955,0.399862635719825,0.3868557603771294,0.39713393833977817,0.3972064767548384,0.37409611314496033,0.372
6331217416454,0.3769198165240857,0.3924168610688781,0.3675687648941746,0.3967645574223419,0.38837812827013,0.3828644028947023,0.40181274032227815,0.3572933169769031,0.37954981006375466,0.38937028970092863,0.3808353709104984,0.39831028619909226,0.3633340183160485,0.38565391371849367,0.39057112284223977,0.3809903664545665,0.37759689420121095,0.3714550935817008,0.36750431099245134,0.3941512787817204,0.3789183880578838,0.37942455362218924,0.3753762565074553,0.37870933236649673,0.3598980565871217,0.3734410451624062,0.3893375519504485,0.3652050607397833,0.3637215273269405,0.39781384488041516,0.37985471602293785,0.37207605791631626,0.37280033822861436,0.37814893388593124,0.3785677349781055,0.3849608730870958,0.3821110375815208,0.40034207664870003,0.3865612295265943,0.39210079625816563,0.39591456718501594,0.36518397572937217,0.3927925755105749,0.3751104295151126,0.37728535322714596,0.3820382596581099,0.38029483399639025,0.4016273612027234,0.3904316839512415,0.36141547966668336,0.3820436297733914,0.37633150481044936,0.38920614921743407,0.3821122865086929,0.38510212057886556,0.39053264916788166,0.4081684110059008,0.3896713124323685,0.38894313267626274,0.3855433341253649,0.36087708662644674,0.39488173909862356,0.39303063341361016,0.4065946914197607,0.3807778064170094,0.4012999434635183,0.37199126965173696,0.39863467559221355,0.3715069897694523,0.3740202524942716,0.3866835971714196,0.39922621118489665,0.40195060396431903,0.381588845127521,0.39818878893090903,0.38774269811932743,0.38489908416957275,0.38514057869974166,0.36519023415968377,0.3810867773035829,0.3563765226733559,0.40872435907542987,0.37896753744050765,0.3787423045043844,0.374130106245148,0.38724954220261226,0.4096462836491994,0.380375963238845,0.3934217937467944,0.3996899836164855,0.3956928609835368,0.37493815647494166,0.42621537442972074,0.3663554882113745,0.3902068579748002,0.3990335991460639,0.3807841463980448,0.3789400809647599,0.3932129122934318,0.38255495017869284,0.3880896393461732,0.4039001346820744,0.3836274322865792,0.3824119474369478,0.3714036384641282,0.3868195851240887,0.379970763551563,0.34906250102526576,0.3657815825829908,0.3788963391524515,0.3989310831907862,0.3848474459774309,0.355971879673248,0.38748886827536905,0.3710295970358108,0.4063431138477145,0.37164840918970904,0.37853070972273983,0.36562957290556636,0.407439628411677,0.3847187446726671,0.4166428708533224,0.3893875034490877,0.4031499931756368,0.38316862022471015,0.3714969446252225,0.3811760725044679,0.3930615049816364,0.3990264668816983,0.3828995285788991,0.3660865489836387,0.37823924007021,0.38066789783020055,0.3741749117931895,0.3876848728893339,0.38983559496428766,0.37175436216743424,0.40576823673413104,0.35904160179480993,0.37245171901139323,0.38422219175083216,0.37357978081732973,0.4135610349743706,0.3949169105885096,0.39477914139695747,0.37250947245502986,0.3956315862234325,0.39428137205062586,0.3648814311502085,0.3849111495152168,0.3749360961004238,0.3613737935097387,0.3756243418275903,0.3715076346819995,0.358713619283773,0.40803745145847964,0.4002273209588395,0.39852444412345106,0.39174201285422316,0.394804473220044,0.3700902037203485,0.39744018974381184,0.3918692047712442,0.3603269339538335,0.39145363157286095,0.3914308766316395,0.37744378217704366,0.39799600067559887,0.3905269507721467,0.3844524463224317,0.39765250969924615,0.361930867304208,0.3712777327894708,0.3984871412880748,0.38041698661016066,0.3660393940292367,0.39487913253235984\nCONDUCTANCE_PM,smallest_node_weight_complete,0.137203133473,0.00961827723927,0.1560085441740258,0.14286098411159714,0.
1202127266183223,0.16702704756027695,0.1422990552979127,0.1445681623716964,0.15634519951153358,0.12462014030547004,0.13553521533857987,0.1413794371177096,0.11991961489354518,0.13010643795292298,0.14714082297775313,0.14771335948361908,0.15037418990465765,0.13569991572316764,0.14425782333615222,0.12522558202596296,0.14689761122863326,0.15439217573305433,0.14306937441044407,0.1288186241598141,0.1465997604862803,0.14888947925640794,0.13610401831067057,0.13800008141273662,0.13223380280593514,0.13523089511207417,0.1250768954494395,0.1558862136095845,0.14854468433087514,0.1293382066117929,0.15336709884119534,0.1325514118034979,0.14697054107262142,0.14938179932675208,0.14553663268657774,0.1334032632648813,0.13853710425482393,0.1368895364211556,0.12861365649416853,0.12517708766647997,0.13125137046500282,0.13486896546864224,0.13701615378513354,0.1373729960826626,0.14633432930620136,0.11813861546715929,0.14912594793556133,0.13567708282054292,0.1561581144453006,0.1344203632537563,0.15109133673662598,0.1263493109580725,0.12380931581272234,0.12670491160247532,0.13884088948158413,0.13609023531545072,0.13953658612960074,0.13598808411367555,0.14524567915428843,0.14420128273596194,0.1303110352863379,0.16214506439859835,0.13440102528728817,0.142538378540726,0.1320943556849711,0.12654385201647447,0.1288130353826083,0.14780267612121512,0.14043724712579597,0.13675141650724468,0.13550038167078038,0.11678380776011586,0.13266920668739046,0.1270630561098893,0.13375770680990554,0.15860380256046797,0.13535606151423815,0.14251643680825815,0.13305532467761735,0.12150333846758872,0.12862455588494262,0.12751557974959013,0.14047563438078622,0.14431585253621318,0.13898218286273287,0.13580609895486495,0.14414055435110484,0.13416403213992542,0.12985681546921146,0.11941670245824955,0.1364275268386603,0.12762395085327743,0.14333873135594558,0.14035790621672428,0.14205806852192424,0.14058803482639035,0.14787101442934614,0.14078087622606025,0.13732681666316787,0.13245640935567274,0.12865695675480399,0.1495607510782186,0.12561922862235705,0.11968700602645652,0.13024362867901648,0.14546662055308318,0.14813992255918837,0.13038095414589354,0.1359949877982756,0.11052229517477614,0.13402018522437997,0.14878761682604472,0.13668809647357844,0.13617969949915637,0.1305089466292174,0.1424845653326381,0.1266422808288047,0.1408626197171658,0.12985410815991616,0.13613533060924665,0.13971854965926428,0.12810878562788544,0.14443938111439839,0.1330948552905319,0.13625294208328695,0.13763504970944376,0.15038245795090097,0.14352191072437415,0.12082225669732588,0.14619503043860155,0.12512354595148883,0.13884963771559675,0.15325856483112846,0.13285049196664195,0.14249307148839457,0.12763405802106587,0.14861809260579778,0.1391400475348949,0.14131885471160777,0.14960796963741113,0.1294032901391626,0.1281876790135956,0.14452440402974262,0.15353636405486212,0.13604209315996668,0.14627719454706445,0.1320898720001773,0.12376423004571552,0.1472451867477831,0.1312003313051591,0.15008118855652308,0.13580345062565208,0.13272879985480354,0.13233206204629705,0.1255827607288052,0.12859005601933385,0.14640135607540672,0.13473361049307675,0.13881147654517634,0.12846666265949439,0.13737643933414706,0.15330051199619385,0.13020861811455267,0.14022327888924213,0.14233165486749427,0.14227529691006757,0.1251411531116257,0.12737916726345097,0.1481317823539377,0.11839439634098607,0.13505828540216075,0.13584219225361174,0.12700127701918634,0.13990830092212828,0.1345281162277395,0.14458314534108865,0.1488371845331332,0.14498233724195841,0.1408085337575441,0.1426340475960438,
0.14091102196649563,0.14052143829925234,0.13547564651356483,0.12718657005482298,0.12709753557220715,0.14189401489925216,0.12855265887841236,0.15959790888003458,0.13308885305002702,0.1433602281313852,0.14168687926782952,0.15540651961271984,0.11707954242173277,0.13797984799610208,0.12939219922849549,0.14104120257305866,0.12357974569150193,0.15997701886977042,0.13763022958541382,0.1360352888033904,0.12967173996319165,0.1282673222625073,0.14205341252859938,0.12703869575830512,0.13842268556332427,0.1347015649967623,0.12516585631150826,0.142834638855668,0.13313745437685395,0.1457514082470691,0.13474001506012434,0.15490631440454558,0.13629551251278302,0.15236392720963596,0.1307899796807178,0.14201540982385666,0.14091955274704948,0.15371895496396287,0.13184115484138859,0.14628298496621622,0.14202146028436224,0.1383920421902661,0.14319880704658375,0.14677234387285398,0.13386846311124104,0.12000639004127155,0.14180958466956292,0.1553110893080187,0.1403988217617242,0.1131521132662883,0.13174841045122856,0.14968079946451665,0.1360921918634371,0.12281399944119566,0.13843316183010712,0.13592693324575122,0.1660338093859227,0.13188112274365427,0.14789783965832465,0.14890235913456107,0.1463526485725553,0.13752776145175608,0.12706142263674503,0.11881298560308616,0.1387681303157734,0.13679187637484252,0.1478819878245838,0.1311049021552467,0.12988074497029217,0.1437101506798109,0.12892064607948742,0.13549765522382137,0.14776985081427044,0.13279158734897842,0.13693508927898204,0.12830506743342177,0.1302207130021382,0.1461811030425162,0.12078752368629248,0.13432974869954364,0.1499798459239695,0.14661122298244267,0.12896608758447287,0.13679058617960915,0.12325466268467498,0.1451893206001019,0.1376136168048948,0.1581290740345725,0.15367350149823572,0.14565172447666622,0.12454721149531107,0.12020759866696959,0.143420545998078,0.15027972342088344,0.13771547889771885,0.1395483678712095,0.1433549386750389,0.14352349061304914,0.13086328920425958,0.13394572776138394,0.14283558522621295,0.13774761160045232,0.1318575948222929,0.12833692280256986,0.15058282156683245,0.12053529710559545,0.14074847981215932,0.13878058123671633,0.14055486160955444,0.13285660812357492,0.12796871375907726,0.12215183054756198,0.15719707383920603,0.1250724791395303,0.12289163081013614,0.13789373452472473,0.1403078471843586,0.13366000338022638,0.1248301184784007,0.13675956973462375,0.14580119955580284,0.138300782361152,0.12774769258526766,0.1282859287746577,0.13946783792556847,0.1566195834632761,0.11379528524067727,0.12167715219306362,0.15783632317512966,0.13043406648366435,0.13118284363800872,0.1343763015490956,0.13326610421102905,0.1336057676771414,0.13542793556562527,0.13337875444378836,0.13749962974260765,0.13895881013867772,0.15607187025651983,0.13817281068492332,0.14826603644515213,0.12481054438109172,0.14020430384416238,0.14271848049826885,0.1309333216899627,0.13386367610714411,0.1310751353641553,0.14468721400684126,0.1385062237288144,0.132301349714502,0.12157433014870798,0.12116505057531783,0.14359553387584795,0.14926515499106735,0.13506900787146306,0.13701454784243464,0.13505775347907356,0.13454209929068756,0.15461113984143338,0.14288013065040053,0.1390162770440677,0.13482074653173534,0.13853788410086654,0.12935904318757419,0.14173093711204465,0.1378050114045344,0.1246900628083954,0.13786317141110174,0.14149649806203096,0.13395440575659295,0.16285564629007665,0.1411909971618747,0.14782264651993973,0.15187431226792816,0.13346997573741107,0.1322926750015639,0.1394066211015873,0.13834525733340222,0.1374826637598125,0.13589397731807368,0.1221
0695003023112,0.13950004723583334,0.1418600604607978,0.13376990997372643,0.15682646254306115,0.1387014658385309,0.14039841871832373,0.13682019522554767,0.12158871829158889,0.1447009278337713,0.138150553771316,0.13010167421993393,0.1443292495996339,0.13762587546578292,0.14981661317625078,0.13545260198492165,0.1481090602913635,0.13804861694787124,0.13073445105382495,0.13994171140410638,0.1203222242065877,0.13679083687135615,0.1451600317834999,0.1266846650908225,0.13358650535854302,0.1728773813839821,0.12170294494428205,0.12801573047556764,0.1312311587111124,0.14544031482328054,0.1400484655029874,0.1416484910798422,0.13800866152763835,0.13399802665901706,0.1489774182497837,0.1555425106143849,0.14634496799281188,0.1525364854602983,0.12825251892284534,0.12399041901771232,0.13727372402708934,0.12397908592198079,0.14169101569528594,0.13795154213028024,0.14998105064560163,0.14240087179339286,0.14431446936883116,0.14323195195693192,0.14553783604832118,0.12179857571664912,0.1483436595227288,0.12939599987519146,0.13751989315316732,0.13077056006916718,0.14095619855560676,0.12549358737100996,0.13139616476660898,0.13131362254152454,0.13550039704998704,0.14892854545658135,0.14735729664039865,0.140346201170795,0.1440301585858227,0.12272211570700159,0.13312287873332146,0.1447686474214583,0.1413147008620032,0.15909845436343853,0.1274468484787474,0.14456315550711543,0.13945269037580668,0.1476032648377889,0.141236811963827,0.12668198585641527,0.13491045127455778,0.1388809428114838,0.14432927712682356,0.1402295879796147,0.13829848048480803,0.1330003398331031,0.12133351725004704,0.13686432433889004,0.13469001449308388,0.13486071585047682,0.1494200537115966,0.1401866409362037,0.13398148214566186,0.14208537996763318,0.12236398838599581,0.15524803853684568,0.1407231196327935,0.13817986371045293,0.15813289201483927,0.14303629385348585,0.1480829948561291,0.1353983415422748,0.13103956196613092,0.12801334925240324,0.1297220069997636,0.14709567848183877,0.15492698096281446,0.1361442818768204,0.13928915292157834,0.12027512044200223,0.13387316056719484,0.13923223765139378,0.14998799307120622,0.13210493570183193,0.11484048280733013,0.14422461558540325,0.11318197216752611,0.13532965141919823,0.1497955195476228,0.1459832567173314,0.14576911390225208,0.13308015338830045,0.12679438782910346,0.14204863544874707,0.1300583293242664,0.1345798973584222,0.14353756323968206,0.1346006564421867,0.13327700560747077,0.14846821044401795,0.12719777207585184,0.13355533093391836,0.15230400944566366,0.13667270866015116,0.13172882282155163,0.13877915829389711,0.1438564753873387,0.14671304011611824,0.15410609875881925,0.14326911612333776,0.13365355255327896,0.13781482199861136,0.13229564489615983,0.13674521240055104,0.15440900892225282,0.12169194329857146,0.13804037323998494,0.13345404520281479,0.1371653210656795,0.10382084205678856,0.14346720658848067,0.12413489979536624,0.14877360183325414,0.1318659562266228,0.13906358261427643,0.14211513730279335,0.12574880324018736,0.14437587578941954,0.1408706260632709,0.14242117758420153,0.15101752653443845,0.14369128806325784,0.14725018593249983,0.134916452650338,0.14440846178807698,0.14225245642026005,0.1296660986075925,0.14374688456811774,0.14094265931410713,0.14527133848670337,0.12847274317741397,0.15060178876270572,0.12338125527328687,0.13718619152817124,0.15839828458656988,0.14525556651727725,0.12906646416939946,0.13898670268143146,0.13877958851119448,0.13071479840872813,0.13602905333907728,0.13436577733450566,0.1406363852736769,0.1507508228572763,0.1421530359046792,0.13293960301807997,0.12844536352
615774,0.14101179251912366,0.13458931991233794,0.14623443300121133,0.1404275926443269,0.14037079616448947,0.1536004477878403,0.1498271245006343,0.13228722255457084,0.13950487123320493,0.15930725988425046,0.12957768097581324,0.1309819329037233,0.14339222924046005,0.1273195748852555,0.13833374595159045,0.13856473928994578,0.13461578854108275,0.1353279605662074,0.11301870805618958,0.1170693679971765,0.1541878034010931,0.15044517169299404,0.12930172928143904,0.12119153165197444,0.1250030092749616,0.14472600543859765,0.12218818848101401,0.1470082795510545,0.13694418744756462,0.1362375945868189,0.1402656380545361,0.15794549254662646,0.14156008011400958,0.1318781682295829,0.1427756626431559,0.12386962765466998,0.13717393952748905,0.13333924346133205,0.14219998506478673,0.1274499184808512,0.13058880206093856,0.13855822718477287,0.1336335825268696,0.13509731357404695,0.12688188458643712,0.13216578568126305,0.1595213650708987,0.13458585917819332,0.15370498595643853,0.13292525584361448,0.1467067614760523,0.13183600828884579,0.1405842477759731,0.12249442068888365,0.1427401512783605,0.1289837710405983,0.12719782740298743,0.13911084441356786,0.146280822895869,0.1423779827650465,0.1392444497175046,0.13731713362985398,0.13059562798540905,0.14631582055545797,0.12478793563832602,0.11853343805006732,0.15869795650053925,0.12014739617682443,0.13147579734473766,0.12555051707659984,0.1276583428616101,0.13744736930592144,0.13141454740697267,0.14600526831538707,0.1437339427427839,0.1554098307264702,0.15215098070718164,0.11761208232523716,0.1344991663913276,0.13752709807760416,0.12755637945056628,0.14851286813859546,0.14465930479870812,0.13898077815046764,0.12536244056549628,0.13731260814791527,0.12180159795139263,0.14672753993745197,0.13999250709213076,0.1325691686622802,0.13932302083155435,0.11973833823962247,0.1365640065732142,0.14728559627172616,0.143312578138557,0.14349415255993303,0.14504045793911163,0.12911759636402179,0.14237545453836464,0.15022904020895197,0.15301793261073912,0.12286116263978358,0.12993458741641456,0.14120416063829783,0.13036579744998952,0.14888670723859423,0.146144674574648,0.13836034016334914,0.1435906138284597,0.13877743855976293,0.1365595748160946,0.11935544975308769,0.13844920608386446,0.13443281600960122,0.12877061989548658,0.13482105648743103,0.13216137490015675,0.14700462750717372,0.13511629145558726,0.13321636739368237,0.13701948434989122,0.14456226375493453,0.1411511405563969,0.1455817042370238,0.12735638099400937,0.13968287060463994,0.14035414453213213,0.12185931974168496,0.1324899935367233,0.14586191194202575,0.13519928115556853,0.14951011038487935,0.14106866756012285,0.14213327229841408,0.1347747178939793,0.12243014806627187,0.14132399944549082,0.1313505884031633,0.14896895447174574,0.11669166214760372,0.14944818866006726,0.1436266538690818,0.11843667560393444,0.1371697486017287,0.12550341527742953,0.13985145781577274,0.13597234755928803,0.13548472078325102,0.14954602104846887,0.11582338842838251,0.12796558445918,0.13582915543246665,0.15541998162587517,0.13063804556151096,0.14295823058372178,0.13971056928951753,0.13225611588493702,0.14677956490441477,0.1361813456272519,0.13935359665232724,0.14931949948296372,0.1405672566624748,0.1320596816805242,0.12917917235286316,0.14090122401982988,0.14662960751149998,0.12320749925137159,0.13541760985518533,0.12444502463479037,0.13854929042158953,0.12061262444984805,0.12651094475649052,0.1285503647891572,0.1493433304022877,0.13206336100763275,0.11677956340559402,0.14284335363654693,0.1389706759814223,0.1256513612382427,0.12525028771178068,0.
1378408417892487,0.14235270736537686,0.12847648337987375,0.14150640302932327,0.14646784954847933,0.1359102484923337,0.1524495377930112,0.13381877089406558,0.12628908026590632,0.13488787243470432,0.13959957527960473,0.1459825506864494,0.1448192513998908,0.11976405881207217,0.132517190876678,0.11637430785077035,0.1323128683182608,0.15322997080773035,0.13669676029851285,0.12214593643638132,0.14026984507692258,0.13215994090657074,0.1328625884547314,0.11974335082116214,0.1329994093695763,0.13362161630610164,0.12194437509912719,0.12970938648790578,0.14315533889487464,0.13594248092707165,0.12683866287703247,0.12359508903704014,0.13193663372093517,0.13076277330920152,0.1196258504003424,0.1233930782897537,0.14509773774836143,0.14689389592869662,0.13604841425835543,0.146556962215219,0.12573644016250304,0.136859807405222,0.1541805194179494,0.13105890035725906,0.1332974432703747,0.13313624237164878,0.14039899934213282,0.14910139165574193,0.13392456388011195,0.13860946801824775,0.13176473435439315,0.1413896387423186,0.14417548912246217,0.12354422123924497,0.1260761671118578,0.13314781836301512,0.13610042914631787,0.13892284188951323,0.13803047116391695,0.15739335455140446,0.147329102516928,0.13304203693058334,0.13996634803373048,0.13506736382770934,0.13530367587956338,0.1368814747068506,0.13668585604084937,0.1348233086713592,0.14246020905773207,0.12115990080277816,0.12459872121161797,0.149168978164984,0.1486674750764031,0.1465566521721417,0.1285068174778596,0.1392061913137771,0.14138790578832877,0.13037365914076815,0.13707238820557352,0.14261338249159736,0.13767266949635637,0.12814007177726017,0.13692294649115255,0.14629381728094878,0.16622954576977964,0.1341486918022072,0.12783081131279103,0.15609469608097695,0.1415162493159701,0.1408680805932081,0.15077940241351298,0.1511038978582196,0.13709527422366177,0.1215478305630726,0.12614004924286273,0.15264099227478808,0.14351950367156024,0.14197411558567405,0.13407761027877962,0.14266009179836517,0.1312685987209904,0.14276585582384144,0.14228387071564957,0.13366121903753436,0.14113844096357758,0.1370729376213726,0.12866325920512584,0.13648261310679205,0.14794857719005813,0.13391685750448748,0.1384595171584651,0.14548150143757319,0.13564064757645403,0.1280422653830563,0.12722768023061462,0.13327548759077917,0.11205701044215058,0.13387091102069498,0.1290405667900583,0.13370029711269274,0.13467902086546665,0.13892875288522788,0.1299156109714222,0.14423266633346538,0.1417173205712842,0.13270704010169845,0.14666088885158973,0.12175927849725136,0.14336492825019717,0.12974294840007647,0.14089081233660103,0.12842146206929547,0.13908596162060524,0.12045122269049263,0.12806059892982777,0.1480562198590137,0.13898268367468766,0.13076231988802442,0.13669274741458254,0.13790903819834915,0.13725381404275538,0.14276649611377482,0.13016425883938795,0.14140877963086024,0.12671830870976217,0.13550919947273835,0.1315894239788901,0.13374969957019897,0.14507713253341817,0.11437644365388577,0.1334076220418328,0.136929959383374,0.14450208815028145,0.13931001995217843,0.12382496442471701,0.12917963157335843,0.12186573477281064,0.136823765066805,0.13134129098991768,0.13951262866285014,0.13027188329092929,0.12837515778857247,0.14295670861910156,0.1390533354901956,0.14249221115342242,0.1441253877126871,0.13452548687752935,0.12795124207571956,0.12215350195113713,0.14443417646128448,0.13854521013188514,0.12306469037930214,0.13869027542840365,0.13942369854147468,0.1400561802134954,0.1255765232744584,0.13842653848610467,0.14013576156358035,0.14126225943409157,0.11315865550232693,0.13810782
0640256,0.12464483661649875,0.14088197825690665,0.14412026373876552,0.13109446946361383,0.13072315945062854,0.1303263535969032,0.13872084515254735,0.13616970347257273,0.1291865153719205,0.13284232030766782,0.14255959574681473,0.12968451523632163,0.15550892740250397,0.12706888319979265,0.13372382168696995,0.13073535899749492,0.1374372053410741,0.1334169787256637,0.12140580765106329,0.12956416295100043,0.14420273080210216,0.13215483254026922,0.1362664252855746,0.12126991812063534,0.1282929490735163,0.11529831084183433,0.15055579262837854,0.14113507259941055,0.13715653867740688,0.14609550610094987,0.13635170851975445,0.16335527569290884,0.1441182364159821,0.14792930559076473,0.1359596701061799,0.13571124837583445,0.1467010102413108,0.1375714704239307,0.13616607600746714,0.14774685473480947,0.14683029621836122,0.15205247549055442,0.13147584871292864,0.14022967006933443,0.13243733709471586,0.12651971968860962,0.13020870405524418,0.13605339602952732,0.13235124243280283,0.13079059734176968,0.14146131698295972,0.14197581546893923,0.14142726347472773,0.1318081197086578,0.12704254126641337,0.1370026488597545,0.1409054422230321,0.12562930531263006,0.13836759471893736,0.1396702111908383,0.13273998160304576,0.14242072355368987,0.147144019798121,0.15190054433965866,0.12990874616498574,0.14558173667218471,0.13127446470755774,0.1317394847248411,0.13510122676691644,0.12361945368718025,0.13374177729373599,0.14815054874243,0.15417416501616424,0.13842107230508746,0.1307304955463595,0.1429710465653408,0.1415842832422908,0.11757613198675956,0.10825390173248038,0.1381642397458578,0.14068778298657536,0.13782104041743212,0.14806063040674078,0.1437856157236304,0.14540025939537252,0.1433420754853202,0.14709561649593572,0.14273293649547833,0.1388972624041911,0.13135529352845815,0.15576134971284525,0.1225522568580354,0.15934770647609545,0.1392772668346075,0.1554436557239318,0.1318260870864701\nMAXPERM_PM,smallest_node_weight_complete,0.4052868489,0.0166288507804,0.400072801,0.385489826,0.40177960800000007,0.36420345000000004,0.387397463,0.420613165,0.40166769799999996,0.41280242799999994,0.393192196,0.39013097599999996,0.393004186,0.410701724,0.398297563,0.41779487900000006,0.39927518500000003,0.420648486,0.397577855,0.391103977,0.393572662,0.406935536,0.40123420899999995,0.389601786,0.40770567100000005,0.40327062999999996,0.41511269199999995,0.40963978799999995,0.38992147699999996,0.399210215,0.41673578599999994,0.4243958669999999,0.40983539499999994,0.417870114,0.408904241,0.41574652300000003,0.40004243399999995,0.385043298,0.438391114,0.356354475,0.40922324600000004,0.42185905100000004,0.39424024,0.44687540200000003,0.405016736,0.41608414600000004,0.410894802,0.393219926,0.417322951,0.411400565,0.38628578499999994,0.41680796499999995,0.4149112079999999,0.439453309,0.42136735399999997,0.415986541,0.37615720199999997,0.409580538,0.400758459,0.391323145,0.389470106,0.37823435400000005,0.375787838,0.40243214400000005,0.41100057500000003,0.391974292,0.391726975,0.404728358,0.41959764400000005,0.386177772,0.39067681100000007,0.4062488979999999,0.381125935,0.41613587,0.363303276,0.41588155600000004,0.381829603,0.409676238,0.39304378799999995,0.43371450400000006,0.39415517099999997,0.38554665499999996,0.40911350900000004,0.44012769100000004,0.370671949,0.407326107,0.397174617,0.37975085799999997,0.43520450400000005,0.41013632299999997,0.40133754800000004,0.424539254,0.42920319800000006,0.408634001,0.40530407,0.39856133899999996,0.42458477,0.440408168,0.40263623699999995,0.391722821,0.42745947599999995,0.38913690199999995,0.
40729254200000004,0.42456308,0.404928433,0.41484234600000003,0.403397017,0.452781555,0.414276353,0.39025505800000004,0.418367577,0.416067944,0.40212803,0.401176612,0.41446068699999994,0.380194803,0.39929413199999997,0.42110566699999996,0.38635314,0.39141733200000006,0.403925765,0.37815262299999997,0.413520428,0.418018729,0.41552741400000004,0.40181440900000004,0.395004276,0.430936966,0.420347978,0.430159762,0.418992364,0.39128207,0.411772225,0.405663552,0.389878052,0.41930922200000004,0.41178587499999997,0.413031526,0.3979809680000001,0.396170969,0.409716201,0.415045157,0.42761688400000003,0.39436403,0.45019542,0.428319254,0.410900674,0.38805813899999997,0.398925263,0.406638481,0.371046481,0.38814593499999994,0.42884203000000004,0.443311304,0.382703651,0.422391681,0.396591547,0.41864034299999997,0.403506799,0.39952858300000005,0.390135842,0.420803252,0.399324548,0.40602916099999997,0.407562497,0.392663382,0.404584884,0.41824624499999996,0.40886891099999995,0.412051837,0.39652214,0.39737954200000003,0.42501101599999996,0.430483201,0.41968021999999994,0.43595915100000004,0.408754242,0.390330984,0.39180194900000004,0.391982975,0.40802219,0.41314653399999995,0.394528738,0.39853721,0.428571117,0.386263069,0.404920124,0.43235821199999996,0.409967894,0.40885131999999996,0.39446897700000005,0.384493009,0.403644092,0.38778930300000003,0.405379989,0.417483972,0.383766126,0.38334962199999995,0.42737730900000004,0.3888677240000001,0.405966504,0.403770852,0.408997365,0.40471014199999994,0.393977102,0.403280866,0.38986034,0.408220379,0.38865219200000006,0.407882633,0.40604134300000005,0.427395356,0.38697127800000003,0.40409098200000004,0.402507645,0.394441368,0.40469827399999997,0.392739866,0.39208472499999997,0.37620351199999996,0.40959383099999996,0.41072231899999995,0.41747473300000004,0.407083624,0.42100059700000003,0.43030381600000006,0.396334213,0.42119986800000003,0.402856132,0.38196409,0.402919768,0.40045497900000004,0.39296802000000003,0.404130495,0.410584814,0.41143137800000007,0.42370063199999997,0.39939597,0.376371851,0.45054118,0.384255657,0.39198687900000007,0.413867868,0.393221591,0.409632553,0.429333568,0.40684642900000006,0.433688064,0.431158711,0.416647922,0.403208754,0.398292875,0.43464614800000007,0.414965255,0.3898544460000001,0.382526796,0.3961345009999999,0.408007208,0.4121179549999999,0.41922379,0.404124823,0.413374729,0.411635488,0.433921158,0.421379928,0.40297673799999995,0.436091479,0.39570826899999995,0.418960596,0.420582623,0.419899389,0.40508003200000003,0.39245621099999994,0.36722866,0.41315908199999996,0.404897577,0.409389897,0.397507239,0.41490659500000004,0.40124744700000003,0.39708220699999996,0.391721217,0.41556706,0.387466253,0.377950802,0.410064962,0.399728338,0.4109081620000001,0.3666814890000001,0.39073192700000003,0.416804497,0.39724709399999997,0.399974151,0.41729697099999996,0.40674910799999997,0.411989986,0.427924517,0.40452974300000005,0.40495593399999996,0.399548363,0.405888511,0.40685738999999993,0.418994816,0.39184665,0.392223981,0.397116074,0.40289498700000004,0.41087940500000003,0.40136625600000003,0.409485184,0.38209461999999994,0.397657627,0.391248204,0.434119839,0.4073282,0.414647663,0.43869081400000004,0.37598348699999995,0.40381973800000004,0.411353041,0.401866296,0.43654316100000007,0.383875964,0.40431007199999996,0.418482307,0.381033534,0.43745434699999997,0.414407496,0.424033175,0.409961101,0.40863690899999994,0.423396483,0.394900225,0.39278691,0.413728311,0.405669261,0.413698334,0.389092314,0.40736520000000004,0.41872404999999996,0.412344199,0.39
5079115,0.390808434,0.39321467099999996,0.39901939599999997,0.42303161600000005,0.397609558,0.40551276299999994,0.388741298,0.420732703,0.39835662800000005,0.40484541099999993,0.399845344,0.444972848,0.407710332,0.3916635,0.39617508799999995,0.400822695,0.40393891200000004,0.43529752799999993,0.42263247699999995,0.40240039800000005,0.40450335299999995,0.37459260600000005,0.44010037,0.39527284399999996,0.406537682,0.401961865,0.40529305299999996,0.414897638,0.398070864,0.43814808499999996,0.35485247100000006,0.40683893,0.422515876,0.375554469,0.43058595799999994,0.42609324600000004,0.41009061699999994,0.421942409,0.370755492,0.39889482800000003,0.400300394,0.397238969,0.40824905,0.40042339200000004,0.39884313799999993,0.401672147,0.42672155399999995,0.419036708,0.39760832000000007,0.410892476,0.443573731,0.410341744,0.38119318399999996,0.419026228,0.38614034500000005,0.391581509,0.39842721,0.40207698900000005,0.418929824,0.405497524,0.419572003,0.41242242200000007,0.39247837,0.38279669599999994,0.39599482399999997,0.39073764800000005,0.405254988,0.375018459,0.42321139399999996,0.40094800399999997,0.405123609,0.39850384799999994,0.42247661500000006,0.42097762,0.413923681,0.40701476000000003,0.41447049700000005,0.389111581,0.410180707,0.420383285,0.435971148,0.400520931,0.40409962900000007,0.40467703000000005,0.421979177,0.396460736,0.423896651,0.39931775599999997,0.428114063,0.43933848600000003,0.381998732,0.394065436,0.403780712,0.38495248400000004,0.375795038,0.401719147,0.428057548,0.376709531,0.413185445,0.40586501399999997,0.411750638,0.409879302,0.395389835,0.406683996,0.410152537,0.430820835,0.37850778399999996,0.40264048299999994,0.397718774,0.39564267299999994,0.394303089,0.39101174000000005,0.44877733000000003,0.4422010799999999,0.41350918799999997,0.42063489200000004,0.417584664,0.41583944,0.421829799,0.39652307400000003,0.395088478,0.37788281,0.369596318,0.435177789,0.39436818599999995,0.38431503799999994,0.376007226,0.39201791100000005,0.411068406,0.3882814659999999,0.439361287,0.39239099000000005,0.382719651,0.40054015600000004,0.43183384999999996,0.40251061899999996,0.380533234,0.430482158,0.43013886300000004,0.39496000100000006,0.38636688199999997,0.404322407,0.439298439,0.395818926,0.417261617,0.412596215,0.39596264200000003,0.412339547,0.38192284100000007,0.417825846,0.416947989,0.407124523,0.405705812,0.432020906,0.36102254500000003,0.418397391,0.423155605,0.428524855,0.396704774,0.415122436,0.37519025899999997,0.407577525,0.40411026599999994,0.37661891699999994,0.425941092,0.40372188800000003,0.405459549,0.38923253599999996,0.41640483199999995,0.39209678100000006,0.37362985,0.45023784299999997,0.43476728600000003,0.409114969,0.41559457,0.419469424,0.432426064,0.41966800199999993,0.39401586999999993,0.406679756,0.387328731,0.401482732,0.412923928,0.390281303,0.39667517300000005,0.38680936399999993,0.395770657,0.39834042900000005,0.421660736,0.411186263,0.392033415,0.38495667,0.39515410799999995,0.38738215600000003,0.40829379600000004,0.41615087100000003,0.410945245,0.39616098,0.40061992700000004,0.415740918,0.403604529,0.404750696,0.393810425,0.399045776,0.399275374,0.40192871099999994,0.4103919939999999,0.43573504,0.389618062,0.380297032,0.427888422,0.393625906,0.379806948,0.404879426,0.412772512,0.388016698,0.407219221,0.41758418599999997,0.4135516349999999,0.399916096,0.418978346,0.37461225400000003,0.43429049699999994,0.396219833,0.41541443,0.42364556000000003,0.412494249,0.42677854000000004,0.39233672499999994,0.43097503300000006,0.428104537,0.39415006999999996,0.42800
8463,0.39464425500000005,0.405119545,0.418358069,0.404030798,0.377026403,0.37251423899999997,0.409389917,0.410557761,0.412254538,0.412827714,0.42278499500000005,0.376402969,0.389464828,0.404895821,0.429959482,0.41211368600000003,0.394653332,0.41849263099999995,0.40057658300000004,0.42970849600000005,0.391372354,0.379749975,0.37261281100000004,0.35604915,0.402073678,0.400084019,0.404378986,0.432867088,0.405529047,0.411506047,0.41261254199999997,0.37750557900000004,0.41455668500000004,0.39807280700000003,0.43509978,0.41247902000000003,0.40704531799999993,0.428877851,0.40281575399999997,0.405676912,0.377117266,0.424174409,0.438799372,0.37799525600000006,0.407063311,0.40906667199999996,0.423356602,0.41216614599999996,0.40617232000000003,0.399586735,0.42877624000000003,0.40104008599999996,0.38684713499999995,0.407906641,0.44191519400000007,0.401377131,0.405603349,0.430726804,0.406591278,0.411857915,0.406354565,0.40897579900000003,0.387741332,0.418154444,0.39881944300000005,0.385651039,0.382126415,0.38419526400000004,0.41942327099999993,0.37922151099999996,0.39342505699999997,0.442075933,0.41333474000000003,0.40747224600000004,0.38221102,0.395857052,0.38790684599999997,0.385043708,0.417872609,0.40923597,0.40327526799999996,0.429452134,0.368782726,0.413081411,0.40885277000000003,0.404085311,0.41944500400000007,0.40377636,0.411848036,0.415736589,0.407964006,0.39916641199999997,0.383804966,0.390536403,0.430775658,0.391611073,0.38664956100000003,0.417913296,0.397608237,0.37499729400000004,0.3948484769999999,0.402082775,0.393150465,0.421838962,0.37909909500000005,0.40414448799999997,0.388948595,0.393885675,0.40589796100000003,0.40053613900000007,0.416984448,0.384543695,0.39866763000000005,0.41237279,0.389248266,0.395771468,0.420510768,0.42950539000000004,0.39011169700000004,0.41125005699999995,0.42830768299999994,0.38537340999999997,0.41760504299999995,0.430753188,0.421927269,0.39236528299999995,0.39498918400000005,0.389295057,0.39569271399999995,0.38567887100000003,0.37932682,0.403124329,0.41748953899999997,0.422835855,0.42808796299999996,0.396423098,0.395083837,0.397956738,0.418269632,0.427498553,0.416587692,0.420730816,0.418465421,0.366709018,0.412934925,0.403310036,0.423187905,0.39063465299999994,0.39596790499999995,0.405796669,0.410983023,0.40460127,0.421228157,0.406813193,0.41461155499999996,0.40188269099999996,0.39617240800000003,0.41299606699999997,0.419462411,0.403765714,0.41925971700000003,0.424325157,0.415519598,0.40683839899999996,0.38356199700000004,0.41950838500000004,0.37948580200000004,0.396625478,0.395727754,0.381617215,0.417669737,0.39863996300000004,0.388263851,0.411071021,0.432024136,0.4067731,0.426093583,0.405707002,0.41672416000000007,0.40594012599999996,0.409403551,0.415270604,0.37654966300000003,0.398494653,0.423305641,0.440911514,0.39598222699999996,0.410593757,0.383484111,0.39587838599999997,0.397962622,0.407505657,0.422239283,0.378264468,0.40499001199999995,0.40179146599999993,0.421042774,0.391602318,0.40575181499999996,0.38569744100000003,0.41877292200000005,0.41529072600000005,0.40355775400000005,0.388031069,0.399902672,0.434777852,0.39241486899999994,0.418417956,0.408895724,0.381043317,0.401059762,0.452039193,0.41530392199999994,0.406788867,0.438229255,0.429341567,0.41140149499999995,0.38640378500000006,0.40727548799999996,0.39187951,0.38791452800000004,0.39124490199999995,0.388500209,0.397808698,0.39029530999999995,0.40195604100000004,0.404156974,0.40376336100000004,0.429125266,0.39288797,0.39179803199999996,0.430252738,0.41726182600000006,0.40199506499999993,0.409958754
,0.4144837759999999,0.412615059,0.40348069499999994,0.405099939,0.42133405499999993,0.411153905,0.390045211,0.410251503,0.40858125,0.42737097900000004,0.411749602,0.41991947,0.40763663200000005,0.418900203,0.393963806,0.40681272,0.41522328000000003,0.38125633600000003,0.421467525,0.39770871900000004,0.425714607,0.431276292,0.39939630800000003,0.397902423,0.39294650799999997,0.38675816700000004,0.408933024,0.40328812999999997,0.392886782,0.40868947399999994,0.39936521,0.40192685300000003,0.40252252000000005,0.385092078,0.42894666900000006,0.403145661,0.40467200600000003,0.386770144,0.39644381900000003,0.4159763330000001,0.40015056699999996,0.39557190600000003,0.410010229,0.38334448699999996,0.43538814400000003,0.3833916909999999,0.385042967,0.396520399,0.405374033,0.415061954,0.390427364,0.40592751,0.408569042,0.403941809,0.390619289,0.42126239800000004,0.380342985,0.41835576,0.41864022399999995,0.40231734,0.422853982,0.378949148,0.44053502200000005,0.397031293,0.37601356500000005,0.386687565,0.38910178800000006,0.41124032699999996,0.402948475,0.42604443699999994,0.389531123,0.40384677599999996,0.40774118,0.381713871,0.388428766,0.384847265,0.37775177000000004,0.416752747,0.416024041,0.387867899,0.40841505899999997,0.39924273499999996,0.41026897399999995,0.42748907,0.405343096,0.382573178,0.427983136,0.41676791499999993,0.41784962999999997,0.389987493,0.384415499,0.39690666799999996,0.438664975,0.39694165299999995,0.418474734,0.38353804199999997,0.435280406,0.4004671,0.38869273800000004,0.41366094600000003,0.416545436,0.4372847859999999,0.385878366,0.41784089799999996,0.398893485,0.419482052,0.367007011,0.45446222199999997,0.3879006910000001,0.420517068,0.43528837000000004,0.388960271,0.41054795399999994,0.42165634599999996,0.405583058,0.420086423,0.413523804,0.38145061599999996,0.424982861,0.417726098,0.396147602,0.424781309,0.40164683300000004,0.363522188,0.400813722,0.390085738,0.40269860300000004,0.38950766899999995,0.38733898200000005,0.390819253,0.387150168,0.39351880100000003,0.393682143,0.383777628,0.416390519,0.42180788500000005,0.425605211,0.41030616,0.434759666,0.382355583,0.40884747400000004,0.390338075,0.39233013800000005,0.417976511,0.42745998599999996,0.394936416,0.411254709,0.40283008900000006,0.411406561,0.374188943,0.44466111700000005,0.37361084800000005,0.43797032900000005,0.37848399700000007,0.38743749000000005,0.405445758,0.393172146,0.406445884,0.40413754999999996,0.409095012,0.430560421,0.41108566700000004,0.418224702,0.379898036,0.408990707,0.39138813000000006,0.396599718,0.40100052500000005,0.380342611,0.379361717,0.45824169299999995,0.416447935,0.40934166400000005,0.395814429,0.44154617100000004,0.39596127799999997,0.389252986,0.413672957,0.395974904,0.413905682,0.39975340400000003,0.39687967799999996,0.404416398,0.402294332,0.39391157600000004,0.405617015,0.382392407,0.401166536,0.4110628449999999,0.40128148599999997,0.39073450400000004,0.40994276\nMode largest_node_weight_complete\nMode largest_node_weight_complete Iteration 0\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating 
Mode largest_node_weight_complete Iteration 50 through Iteration 600 (markers printed every 50 iterations, each block again dominated by the repeated "Error Allocating Memory for PaToH" message)
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode largest_node_weight_complete Iteration 650\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode largest_node_weight_complete Iteration 700\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for 
PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode largest_node_weight_complete Iteration 750\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for 
PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode largest_node_weight_complete Iteration 800\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode largest_node_weight_complete Iteration 850\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating 
Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode largest_node_weight_complete Iteration 900\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nMode largest_node_weight_complete Iteration 950\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError 
Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for PaToH\nError Allocating Memory for 
EC_PM,largest_node_weight_complete,156.996,57.5327209855,[per-item value list omitted]\nTCV_PM,largest_node_weight_complete,192.448,67.925306742,[per-item value list omitted]\nLONELINESS,largest_node_weight_complete,0.770277912364,0.256778874765,[per-item value list omitted]\nQDS_PM,largest_node_weight_complete,0.337384680945,0.113154897182,[per-item value list omitted]\nCONDUCTANCE_PM,largest_node_weight_complete,0.130594230062,0.0444724151834,[leading per-item values omitted; raw list continues] 
2864,0.14756760036770558,0.14572033192327746,0.13832444736979635,0.0,0.13278832895902648,0.13947508079052656,0.14630958339781208,0.14775291136693816,0.0,0.14741841579165355,0.14494575959303288,0.1517876679206295,0.13623270566241705,0.16332264706662428,0.14524223598310249,0.0,0.13307719218346936,0.15753994926628365,0.1414414840787831,0.13578348777044005,0.15633890578166776,0.1514150696694478,0.14359844088214035,0.13792077600516553,0.14276984818652633,0.15443411670338247,0.14575127474784597,0.16209216649380553,0.13482052208086756,0.14333073011698785,0.0,0.14308385899520237,0.14042828352890802,0.14848319064290444,0.14348789084380315,0.14862582548177636,0.0,0.13687174300909064,0.1526838923483884,0.12699294234041944,0.14592041258795002,0.14877982371580872,0.13665035111709803,0.14307747668971776,0.15261008448352253,0.14543116672338735,0.1416456064688583,0.1524464933313294,0.0,0.16087334488725924,0.14394028568550463,0.0,0.15497277413467558,0.15723105736610501,0.14698208199943152,0.14357593306162905,0.1471452403849244,0.146808017875816,0.147532923171386,0.14509611842026346,0.14743542133802198,0.13991184540596202,0.1265111051822959,0.14896414430943827,0.1413969986158649,0.14654972707769284,0.14747168012509934,0.12647416460514888,0.13883286892569888,0.0,0.13887005988360038,0.15791730401028653,0.16494868836709906,0.12728145628255721,0.1442177087952553,0.15166411112667788,0.14720650169302643,0.13790493636930254,0.14183504809719472,0.1470016132298691,0.13260536824078717,0.1405891790228329,0.15235996332153076,0.1387600767148929,0.14469916579057265,0.1645095123533,0.0,0.14019189509580576,0.16123287878247217,0.0,0.14380047921824865,0.13752785878195786,0.15035321152198391,0.15228965209477185,0.13222511639150017,0.1475940500399564,0.16289612339504778,0.15575835947104852,0.1339384441531739,0.15037106621942092,0.1388213645769269,0.14564300144038433,0.13262028037338433,0.16156120070229726,0.1474472847279922,0.14081702759222614,0.14551295459877434,0.1500463170957616,0.14892984458357278,0.14206935475578672,0.13983332505980497,0.13364393538244101,0.0,0.1762311864132864,0.14998725673414975,0.1501621666823359,0.13999007373251549,0.1442514409115053,0.1338343116374252,0.15083838253386028,0.13807969086938884,0.13540700797269808,0.13261153677124002,0.13857831505105014,0.14638934939311316,0.14066444137377843,0.1374846852480192,0.0,0.16193136318934184,0.1545828191559991,0.13476560234043267,0.15694282125212236,0.13856699105618148,0.12985236414480875,0.1374761443489953,0.1420356711338221,0.15211511502813205,0.13986411243801786,0.1367514921694392,0.14467678679278909,0.1511916769390307,0.14346219012014638,0.14710669863024736,0.13892228829896106,0.0,0.1529990414092647,0.15364144253714723,0.14816403156042512,0.13446710919424731,0.1494346328429674,0.14638188364207833,0.16242470074154886,0.14468975060287953,0.15974186232568144,0.15274965750675132,0.16441581247191642,0.1579357879061042,0.14895445628654116,0.13123310107953756,0.15995913780199383,0.1487879005891902,0.1526233819876081,0.13801433320752193,0.14820688106572721,0.14434494147419863,0.13102605562832492,0.1320163381213308,0.0,0.14841033950093135,0.1558337576591553,0.14620674516025134,0.15298194228348333,0.13681815981265424,0.14719285883811292,0.15893892323373068,0.13247692151143048,0.14750173709326833,0.12951042043283234,0.1451607189936035,0.14093272581927266,0.16306288268124183,0.15285433791710187,0.13239827395109724,0.1526811572298318,0.13826191648527403,0.15042076943472532,0.1637490149290124,0.14873719674364677,0.13118173105405678,0.0,0.14631620007267815,0.13867917037405
47,0.1314462047058093,0.15218305073348665,0.13949833882808546,0.14617092745037877,0.14268971479487702,0.137039220827325,0.14587257305630574,0.13403250019442317,0.0,0.14916158005546093,0.13960994457812226,0.1367775940797554,0.1387848230458285,0.14992033806801947,0.0,0.14907865584689836,0.1335633461044251,0.1450322646871321,0.14932174569850393,0.1392975131462746,0.13735691755641108,0.15079974355344294,0.15177470827282089,0.14084884539336964,0.14734350243822608,0.14306292882856605,0.14965430728740456,0.142479414663848,0.1301400876145541,0.16099070688425973,0.14538346704906624,0.14589887555035802,0.14622514780192739,0.16763598358802617,0.15282124171722605,0.145763274729182,0.1444876184694723,0.15008874281614557,0.1578705714305963,0.13294562676035293,0.1548554002217155,0.0,0.0,0.15522445590601697,0.13639269303005885,0.13409193770255948,0.14592990736451766,0.15466917143812817,0.15109057867478876,0.16191388580885915,0.14046706050535906,0.16104407461866035,0.15402485377591,0.13881926085086851,0.16334550834938158,0.15713779975448638,0.14314156468989375,0.1407978926438092,0.0,0.13311433968437678,0.0,0.14294999472015485,0.14899656926684707,0.1522674752734685,0.155586701377026,0.1437730431254928,0.13229558952085374,0.1220684085694128,0.15713773025601796,0.13813057722538197,0.13381750899249967,0.16689896785554134,0.0,0.15238726495501198,0.14522649116281283,0.15262879016405392,0.16091310357128755,0.1384004850411137,0.14114601604670793,0.0,0.0,0.1450472449915436,0.15286708636634536,0.14257278847361335,0.15145641108517657,0.14643539510587686,0.141577693940034,0.1425274109686772,0.14660899527792282,0.12778788464317195,0.13682781917120215,0.0,0.13663568119616112,0.13198289257174614,0.1337129922807851,0.13662724799966092,0.1508309308288015,0.14984530043980016,0.14208617443298407,0.14982797817124088,0.14232925335476296,0.14212895434602474,0.1434614973373193,0.13915446899243095,0.15236518358089057,0.14670980743279624,0.15448834664700226,0.1499791087631956,0.16138287615661873,0.14534517190175988,0.15002894473785294,0.15394275803258128,0.0,0.15281526230774506,0.1404729480731551,0.14553016673761776,0.15308565616429312,0.17315481956587145,0.0,0.14094515196474838,0.1648126711228246,0.14345909594702028,0.1503957292151339,0.1388566168082495,0.1404617205601885,0.140159690115655,0.14531535079975746,0.0,0.1377375890981651,0.13328050954832435,0.16031085205890333,0.14344112581663396,0.0,0.16313383896902198,0.1474025654355932,0.0,0.16129649221817563,0.1415992262096886,0.0,0.14716551145054604,0.13688270910425565,0.15451270058852248,0.1603843315047758,0.13882744449716208,0.14312345298266432,0.14040364673203595,0.14012592530629722,0.13117821814528288,0.15027017549018853,0.13989316427202686,0.1633769563446452,0.13537373098739106,0.12673975429989884,0.1549000699435457,0.14162989784529031,0.15216810858239088,0.14860857692243026,0.1645343310553081,0.154121688479169,0.0,0.14707812434121187,0.15093254740210152,0.13578519915510912,0.14561233127863735,0.1516985967266718,0.15795560896360544,0.14790077480235692,0.14781091779272684,0.13852875539016207,0.14291308767054242,0.14236341887520543,0.12769837037343174,0.152682901074165,0.15712677431159647,0.0,0.1426505265323156,0.14135521413219654,0.14278821263416938,0.1377052878880679,0.14930984179219398,0.14749285488687802,0.15473164235161915,0.1539793068864346,0.1383161468967056,0.13702317145152693,0.1521778597837,0.14681418536977575,0.13569515936022947,0.13336518196783456,0.15101564038289403,0.15532175645922144,0.1319709147452267,0.13989905474604813,0.0,0.1559743344731572,0.14682547384260258
,0.15602674844432535,0.14137997726441592,0.12917022755051,0.14266074459436234,0.13872683819266984,0.14403875710448086,0.14648611586144988,0.12602289615340775,0.1335191178655731,0.1627334943398314,0.15414480016416224,0.13555169111649826,0.1440057634566891,0.1408789290722344,0.1444015747414147,0.15087187279019515,0.13208618313620704,0.15754690164925278,0.0,0.1453049718715503,0.0,0.1584686175265908,0.15533739839698607,0.14051646576967436,0.13168591212777017,0.0,0.1439181133019459,0.15099858330241592,0.15435839893695977,0.12846477634055387,0.0,0.1522625381205585,0.14920436641722687,0.14164052584148853,0.1324242794237095,0.1590218221695689,0.15325037736346503,0.1338138723865322,0.1521562896313903,0.1428111510011181,0.14952933190001874,0.14866817056603016,0.15588128454025799,0.1313276458712846,0.13670723797196235,0.1388791037639166,0.14965916815339045,0.16089317177004822,0.1502271441040142,0.14358131971758,0.14499515718777642,0.0,0.1484686539035858,0.1456014276509938,0.12220465776017796,0.15909305704438906,0.15277761201068926,0.14558089011315828,0.14735338762147546,0.1417737489741837,0.13477602983395773,0.1351942820346674,0.15130296577179997,0.16603446150036505,0.1497926352853553,0.0,0.15732029167183248,0.15585053037428853,0.14485686580771853,0.1528267809727218,0.14358519467583247,0.14640074926054056,0.14311808860836744,0.13976673898290398,0.13363560974574737,0.14197737544876063,0.15471618551663335,0.14114753730722293,0.1541680924433178,0.14192607424272147,0.13536594258552292,0.1665922249573601,0.14988134204897977,0.16895802007371566,0.14636171668929562,0.0,0.1484561870408041,0.14959673309816723,0.0,0.15509493658481768,0.14004181584973538,0.14620297725338846,0.1583543478380198,0.15731649185332364,0.15399393045178866,0.15939249948521317,0.1399163163718352,0.15700301240399092,0.13508572010913358,0.15227058569792556,0.14493562746441493,0.14387805109012833,0.1338847691804001,0.13868027282184486,0.15271399169924532,0.14313109173622082,0.13997168966433485,0.13571406928049204,0.14356894417655178,0.13187136520397902,0.16463560506329525,0.14390396656333376,0.1513705966386354,0.15291289444621278,0.15272917993384072,0.1307749862821276,0.1378837828622719,0.15652962736980014,0.1415753177230826,0.1567603479642573,0.14313096285817614,0.1600584035214597,0.1516663528551991,0.14253574641432692,0.14091466639989286,0.1463991985432574,0.13971611038321874,0.14382883359786688,0.14608723562084044,0.15250857972346407,0.14864339540861163,0.13465544221952844,0.140527095791385,0.15601974536914848,0.14274493051390602,0.1418048157607302,0.13767761678220417,0.14875783937811649,0.0,0.1386347836991003,0.0,0.0,0.1444013144214831,0.0,0.13186364667192033,0.13281259592834238,0.15365675815824392,0.0,0.1427141676631143,0.14278916937372252,0.1395254881151975,0.15243862719847012,0.15530694500341327,0.0,0.14229719643537284,0.13918655816640632,0.0,0.1340629661852664,0.15643017662630626,0.1442585113340268,0.13842363667242402,0.0,0.15049569501439736,0.0,0.11730506082364005,0.15012956805270936,0.14248874798097608,0.1381021502991083,0.0,0.1488016714425006,0.15105418511490154,0.13177772268277863,0.14490914162843846,0.14017573824397925,0.16004536415733622,0.15385711291146156,0.14797428580130417,0.13692555767513134,0.15165102126223592,0.14902736798622088,0.1372602025090658,0.0,0.13330018534056245,0.14213730458348584,0.13219660484037582,0.14744416468875987,0.0,0.16928789892200866,0.13720595859926396,0.14629447538266327,0.0,0.15314689486306532,0.1481451741876863,0.0,0.14881083721784047,0.13669672244473763,0.1554000206605927,0.14747675444139785,0.1
488258892623074,0.1397261478727119,0.14290760421185955,0.14003068229517632,0.13788403259729826,0.12376174714760954,0.14994607711385027,0.1579011425368937,0.0,0.14236908939921306,0.0,0.13246388573780485,0.136182046826622,0.0,0.14017431684612922,0.14310931992584597,0.0,0.1513118662057408,0.14856003877379986,0.132481224320619,0.1541605308627475,0.15156254946821646,0.13928410808906086,0.14518634837421995,0.13581374492105325,0.14224461984468262,0.13936934965715983,0.15983749692029176,0.15872326458721187,0.13963897536873499,0.1586027063402074,0.1612330119378212,0.14462301507415656,0.15475147880108236,0.14630668966398094,0.0,0.0,0.1627856459329408,0.15038822857740833,0.14369591095532777,0.1411766122081715,0.0,0.1482888260874316,0.14619602028665593,0.1506368674255376,0.14320294153916482,0.1371986811825389,0.1343970206091071,0.0,0.14209459355347545,0.15044136439261843,0.15617549293730085,0.0,0.15597130438716753,0.1489616470677281,0.16253766955002633,0.1548849761079509,0.14416076299387182,0.15898678794323629,0.16596180637397107,0.14550850179092567,0.1522747527166997,0.1442985292578239,0.14690897586777138,0.14032492651866268,0.13705159905952474,0.0,0.13666022419298857,0.1409263838467949,0.17283694886756076,0.15066928438487157,0.1327089564360369,0.15738228775841265,0.15820330082352654,0.1476565356275951,0.13484512629493503,0.1341576314500278,0.1477477577760962,0.14625724756724287,0.13798134779041055,0.17474416882877483,0.0,0.14231900448041812,0.15071263287762454,0.12974596265031516,0.125298992612788,0.14287998667729634,0.1337281508360008,0.13468274596963228,0.14527379460161297,0.14554885034685966,0.14255572497935634,0.15578349524482443,0.1425395569032032,0.14908246030198188,0.16617990094712703,0.15172936318988836,0.0,0.0,0.15906958320832584,0.1389993066591217,0.1552646668679863,0.14197642383326745,0.13883506988000932,0.13515605696073793,0.13790696642976377,0.15234971357722116,0.14418901607216458,0.13911147438746868,0.0,0.14404106962336388,0.14851845515885062,0.14090638934588262,0.13977932651129266,0.1328752034850949,0.13745094085712456,0.1510771483747083,0.14081014715393786,0.14171131398978823,0.1462959853798434,0.13622474225659256,0.13359791228492,0.13723203895868083,0.0,0.0,0.13354335702840348,0.13821345706293986,0.14720929061881294,0.1305437552596409,0.1492743223374413,0.15747940493156196,0.14371672055424414,0.0,0.15564116236302336,0.13513668153371222,0.15277242853919384,0.16082805779894468,0.14531922940471545,0.13299756074217922,0.12610162864904856,0.16547971495548747,0.14400403689577787,0.1409141903136108,0.13449004001537465,0.13454300009161785,0.12233566377962105,0.13324772831036785,0.13549194635376863,0.15047204667332112,0.15572242049869506,0.14536098238713846,0.14006159796819115,0.12849114363215794,0.0,0.14003420496947963,0.15047095077776343,0.1503968998332621,0.1389603449255193,0.13394856181234152,0.14088110043861482,0.1337710173334474,0.1426792254281946,0.1412401460313417,0.13251487253652225,0.16044452308803173,0.14153386117300498,0.14680084081069458,0.15267334181635447,0.14379726461965736,0.0,0.1309499616476912,0.1545713940112734,0.0,0.14099876625022867,0.16326027090482506,0.1418232865172591,0.14776828170398534,0.14887704875289284,0.0,0.1534241960801359,0.14119734168048795,0.12954619085807245,0.0,0.15118239071049136,0.1548333138887519,0.1474067242899561,0.1292566970333546,0.13268791462187488,0.0,0.1236269037190996,0.141589502667239,0.15736552247222738,0.1396869444504065,0.15980084565733949,0.14672094479788833,0.0,0.15694549102534977,0.13436760813165358,0.0,0.14815855503073044,0.1450784059627
973,0.1256236291774989,0.13640201356308015,0.16144996890052038,0.14431002496817077,0.12495811545140939,0.0,0.13550871809828852,0.15648573886424252,0.0,0.148699292076362,0.15108759796659116,0.1562849540881521,0.14935724947105442,0.14832015520456673,0.15005279175102454,0.15290134549794238,0.15096752513833803,0.0,0.0,0.14880733402091859,0.12906940571563139,0.0,0.14056088761465452,0.13881039396028175,0.139685221592213,0.13385433252442197,0.1282274706051421,0.13873581645046001,0.0,0.1439965374367322,0.0,0.14599269147105398,0.13397386038018197,0.15423843919889219,0.13929100071754938,0.15278491665375152,0.0,0.14744728763280807,0.153890118119494,0.1262332347538199,0.13911462452007928,0.1571140295920232,0.13742663848030717,0.13787017430955878,0.1599386611294877,0.0,0.11862597466675226\nMAXPERM_PM,largest_node_weight_complete,0.362643264489,0.121910819079,0.399930936,0.38723724100000007,0.39772069,0.35971571999999996,0.39041756400000005,0.41783424799999996,0.386550379,0.424725954,0.396577799,0.0,0.39271478000000004,0.421986713,0.40238843399999996,0.42911967300000003,0.39685651999999993,0.442075459,0.41063157,0.371360181,0.411824216,0.434812409,0.389939957,0.37273766699999994,0.385614754,0.402830318,0.0,0.41321935200000004,0.39712475999999997,0.39411065500000003,0.433475423,0.41574113200000007,0.403171044,0.40836322099999994,0.39649075600000006,0.403620944,0.39337418100000004,0.3730961419999999,0.43866044,0.35820449,0.388721601,0.41755690399999995,0.0,0.42818030199999996,0.40586374799999997,0.426676355,0.404614558,0.364423228,0.397528774,0.40046400299999996,0.39619667300000005,0.426373676,0.415517638,0.42534217999999996,0.417931095,0.427290007,0.375773962,0.412827856,0.0,0.40151415399999996,0.38099390400000005,0.384674352,0.39281368,0.40078017899999996,0.40502417199999996,0.406201328,0.0,0.41628591800000003,0.41514435200000005,0.362145509,0.393394688,0.410890072,0.370001826,0.39409451100000004,0.0,0.41661117799999997,0.39096404900000004,0.398232798,0.3843534049999999,0.440708413,0.398820246,0.392718736,0.411564979,0.443508669,0.39113834399999997,0.421205774,0.37912292000000003,0.377144474,0.0,0.413558759,0.38972531600000004,0.425275622,0.0,0.39757296900000005,0.40775586599999997,0.39749646,0.39492665,0.42843364500000003,0.411941341,0.417852538,0.41784409400000005,0.38949781400000005,0.406891843,0.40711397800000004,0.398648748,0.40789439600000005,0.403987882,0.44523958199999997,0.37539136500000003,0.39419300799999996,0.401805634,0.41391764999999997,0.397451658,0.38430142,0.406373892,0.394976236,0.403543294,0.40721515999999996,0.387992685,0.40347871100000005,0.0,0.40735242200000005,0.416158633,0.4101125500000001,0.396917396,0.407293563,0.394712856,0.42297617600000004,0.435715164,0.42925645599999995,0.40832072199999997,0.38095619999999997,0.402840485,0.412831256,0.39739764799999994,0.432867646,0.421190041,0.388322524,0.39113619699999996,0.385272804,0.406502936,0.42363366500000005,0.42332648,0.382633856,0.4322366489999999,0.429109481,0.397129739,0.405952991,0.39950919100000004,0.4039449930000001,0.395104309,0.38600466800000005,0.42533702799999995,0.433513148,0.385624336,0.39760477,0.400825375,0.41150160799999996,0.41666517399999997,0.40173041600000003,0.0,0.416043498,0.40490461,0.39908984399999997,0.39770816000000003,0.39607180799999997,0.39498232,0.426408854,0.405062302,0.39748101700000005,0.397832968,0.0,0.422913578,0.40132568199999996,0.406519713,0.43062525000000007,0.0,0.386146967,0.38756379300000005,0.383740975,0.418735483,0.4073604109999999,0.38820345199999995,0.0,0.421813017,0.404091098,0.390135018
,0.414746355,0.39291981299999995,0.374814239,0.39851093400000004,0.37729182000000006,0.41418976399999996,0.38294281100000005,0.40248325700000004,0.40987033799999995,0.39791115,0.38344675199999995,0.0,0.413624864,0.394996813,0.409039753,0.409098701,0.40363112,0.0,0.393241465,0.386649705,0.39863027,0.41018891699999993,0.395347108,0.39422609,0.41465734699999995,0.38070164900000003,0.39611192999999995,0.409068256,0.39936483,0.0,0.39987037299999995,0.392978007,0.0,0.40517423699999994,0.406629363,0.431184279,0.41557874400000006,0.42301049999999996,0.42788978599999994,0.416036519,0.432653967,0.406465576,0.386430026,0.409242766,0.41034668,0.41159739,0.41199862900000006,0.40992952299999996,0.429407024,0.432508627,0.0,0.382729192,0.440188175,0.397859349,0.391979868,0.39876371400000005,0.3816150260000001,0.41678947800000005,0.41472982,0.424707065,0.42513397900000005,0.41432663499999994,0.395571786,0.396388117,0.393975779,0.41681109599999994,0.405916473,0.0,0.372667373,0.381944976,0.0,0.41221431599999997,0.427619506,0.395060418,0.42955739500000006,0.4074137,0.416466406,0.41556534,0.417947134,0.42943667900000004,0.40666264299999993,0.416543122,0.397701728,0.396645967,0.40564655699999996,0.386849066,0.386492885,0.40995554300000003,0.391195545,0.399637092,0.397758916,0.410203345,0.407126964,0.0,0.380934027,0.396080062,0.387840715,0.384486508,0.395619054,0.40137033999999994,0.379496113,0.37174033900000003,0.39052508999999996,0.418611148,0.39522958299999994,0.42379659100000006,0.410400798,0.406457796,0.0,0.419572253,0.39110764,0.400032926,0.392159057,0.433204532,0.421426559,0.401869165,0.401583968,0.39038573800000004,0.415115832,0.387151625,0.389754489,0.396768244,0.39215728899999996,0.36768832799999995,0.37734278599999993,0.0,0.40942625499999996,0.41078225399999996,0.410724714,0.43037581300000005,0.38028491699999994,0.398424788,0.406370636,0.416726317,0.43104407699999997,0.38846595399999995,0.36883904800000006,0.42306362299999994,0.38312193099999997,0.446229342,0.398461612,0.410199208,0.422065691,0.383057943,0.43062204499999995,0.406052274,0.40894187,0.4040291,0.0,0.421898507,0.39401128599999996,0.41485562499999995,0.40583,0.413656589,0.38583193300000007,0.390709048,0.387321865,0.402023103,0.424735619,0.390862339,0.425776451,0.379486891,0.43267425400000004,0.39471850599999997,0.39109057500000005,0.40271137000000007,0.435685778,0.40292374900000005,0.383255746,0.419960986,0.0,0.41757136300000003,0.44183366399999996,0.396918285,0.414344063,0.418029289,0.37423716,0.44719736,0.39448673300000003,0.396980724,0.395493956,0.0,0.408358911,0.404210481,0.43051144199999997,0.374573132,0.408466518,0.0,0.386749011,0.433016387,0.42607664300000003,0.409649993,0.42335836800000004,0.37911833,0.40068936499999996,0.40484780299999995,0.404231874,0.40433949299999994,0.40921976200000004,0.407896933,0.403225997,0.42056022099999996,0.402494983,0.41329702500000004,0.389014612,0.428133916,0.38369448,0.393780041,0.406183407,0.391868936,0.40027302800000003,0.424688489,0.41242929799999994,0.40906058700000003,0.0,0.0,0.398487394,0.398040956,0.389470709,0.392251472,0.390759405,0.402033357,0.36548272600000004,0.41323813900000006,0.413350407,0.395311203,0.38784146199999997,0.383623497,0.403234712,0.40661585799999994,0.415017559,0.0,0.39651042,0.0,0.392331118,0.43164277199999995,0.400993105,0.397988895,0.397069432,0.421266926,0.38179909799999995,0.42272553500000004,0.415121074,0.42056369299999996,0.438913876,0.0,0.419989196,0.39837127,0.395827053,0.37699142500000005,0.409274897,0.41141764399999997,0.0,0.0,0.379379751,0.40103789700000003,0.41
592224600000005,0.39539556200000003,0.376989373,0.399285394,0.40298858499999995,0.409617032,0.41557847299999995,0.40762508999999997,0.0,0.419727621,0.385753156,0.434891293,0.437009049,0.417893056,0.415634885,0.405816075,0.41489960400000003,0.420176194,0.379671359,0.388534386,0.369711722,0.376188893,0.43513745600000003,0.383903368,0.368217807,0.37568838100000007,0.410323516,0.40378964199999995,0.38878892000000004,0.0,0.410932269,0.386009926,0.40020599199999995,0.427483792,0.40607628799999995,0.0,0.42904659,0.40992731,0.39117396099999996,0.386901253,0.41055963900000003,0.436095457,0.39471538999999994,0.404197051,0.0,0.404009405,0.405754012,0.375416248,0.43065213700000005,0.0,0.394446024,0.39231075099999996,0.0,0.359742706,0.401607783,0.0,0.422474721,0.392563214,0.41220655500000003,0.35854716600000003,0.408697619,0.41832042799999997,0.39338752,0.42007640500000004,0.400091956,0.38779798100000007,0.400158632,0.41476763699999997,0.38028456,0.372341877,0.422309364,0.424270398,0.401541792,0.40038566600000003,0.417510436,0.418727211,0.0,0.4069824080000001,0.392291294,0.39460513299999994,0.390342785,0.414146374,0.37141981300000004,0.40803123599999996,0.40968050000000006,0.383525314,0.41776336799999997,0.42428946,0.410165504,0.376012635,0.37248072800000004,0.0,0.38625872,0.39093324,0.42865749599999997,0.410575928,0.387118876,0.40025214100000006,0.39907602000000003,0.416747272,0.39030684699999996,0.39614421600000005,0.408230785,0.399853657,0.391636296,0.412233537,0.41451527899999996,0.38877421100000004,0.384411355,0.437065606,0.0,0.38461668800000004,0.406031202,0.409814123,0.38588947599999995,0.3905977769999999,0.40521188,0.400375455,0.40546108,0.406616915,0.394026392,0.419243141,0.385418573,0.411867242,0.42719206499999995,0.418357043,0.40241070300000004,0.39543886,0.40521813599999995,0.43992877599999997,0.38274816200000006,0.0,0.38932837099999995,0.0,0.40061501800000004,0.408987286,0.388962706,0.3656416859999999,0.0,0.399034324,0.38834925899999995,0.41071952599999995,0.399790725,0.0,0.39456952799999995,0.389900912,0.403869227,0.41996371699999996,0.400277357,0.3944967140000001,0.40840297300000006,0.41630112199999997,0.41179911199999997,0.385564514,0.39091938800000003,0.361609738,0.39639891499999996,0.412554712,0.40126347199999995,0.436560585,0.409744024,0.40340571899999994,0.385598544,0.40540288599999996,0.0,0.40774798799999995,0.419028415,0.399424035,0.398161629,0.416926614,0.42314294,0.38667562,0.38366754099999995,0.42008052700000004,0.42377461400000005,0.37737099999999996,0.395672285,0.40268485299999995,0.0,0.398197068,0.4133280159999999,0.39275877600000003,0.40817939299999995,0.40231439,0.41603691,0.39940051899999995,0.429731271,0.397480187,0.393711575,0.425629808,0.414707721,0.42330970100000004,0.41589674000000004,0.369857325,0.38940247,0.40687403399999994,0.38510083199999995,0.375561464,0.0,0.374886598,0.43901902899999995,0.0,0.40493739,0.441934087,0.406853419,0.38525901100000004,0.4145415820000001,0.37698317299999995,0.39434386,0.39221023799999993,0.432950515,0.421614116,0.38212974600000005,0.417892178,0.36117652299999997,0.40015146300000004,0.3959146949999999,0.40923352599999996,0.397337168,0.41249232799999996,0.405604959,0.40388235599999994,0.42516695699999996,0.39863413499999995,0.395512735,0.380385371,0.41985885,0.37717540499999996,0.38259195,0.403449608,0.368363366,0.402235607,0.39289382199999995,0.420395013,0.38312890200000005,0.40241828399999996,0.389222793,0.39403372799999997,0.388006744,0.38582756700000004,0.401455594,0.378122147,0.43621557299999997,0.38883137,0.41485467,0.4170656290000
0005,0.38453690900000004,0.39738208399999997,0.414151814,0.43262670599999997,0.424871078,0.0,0.423893189,0.0,0.0,0.41477892,0.0,0.404738835,0.38293237199999997,0.374806885,0.0,0.37811418899999993,0.400259315,0.394528625,0.42968649000000003,0.424382567,0.0,0.39728905999999997,0.389888911,0.0,0.39306740300000004,0.42745338400000005,0.406611702,0.41794652499999996,0.0,0.36070860800000004,0.0,0.40362632099999995,0.41877235900000004,0.364667248,0.403304713,0.0,0.409887455,0.392274782,0.413266797,0.391372522,0.41246553,0.387010436,0.4011019230000001,0.411949926,0.42315295799999997,0.384315794,0.40190408499999997,0.40669684899999997,0.0,0.405265748,0.41164617800000003,0.40918811800000005,0.38083383699999995,0.0,0.386797804,0.37665417300000004,0.400733982,0.0,0.38413612,0.39591160999999997,0.0,0.397177423,0.437877507,0.390150387,0.418922441,0.395161472,0.409784567,0.405772053,0.38177829599999996,0.400507517,0.418572167,0.412406928,0.417712699,0.0,0.384968068,0.0,0.389652274,0.42579857299999996,0.0,0.401260416,0.416637427,0.0,0.412202273,0.390084752,0.3967281369999999,0.391516575,0.413311732,0.428730956,0.40760298,0.416964769,0.387590688,0.410544264,0.38919419899999996,0.420090815,0.390233991,0.382546185,0.405534092,0.427109756,0.40806123,0.414460028,0.0,0.0,0.39471884000000007,0.390617719,0.41548334,0.401363295,0.0,0.41005604,0.396269354,0.40562468800000007,0.38243996199999997,0.41157607700000004,0.431276432,0.0,0.42472160999999997,0.36516882200000006,0.40061213900000003,0.0,0.410510333,0.403951543,0.39429236700000003,0.40687807,0.40477342600000005,0.42341965,0.404607205,0.41166759599999997,0.397246168,0.396154835,0.381686251,0.40507790699999996,0.39594088899999996,0.0,0.41599508500000004,0.406504598,0.413487496,0.38952063900000006,0.408636732,0.407816026,0.395711596,0.421894452,0.3970677469999999,0.39999188300000005,0.420527003,0.38930744,0.42762502999999996,0.37532631400000005,0.0,0.41965401399999996,0.393068123,0.38216192,0.40344024100000003,0.395215127,0.3957544050000001,0.40741991400000005,0.378978752,0.43442666599999996,0.40658541200000003,0.411353412,0.382263532,0.418227093,0.423307239,0.397091189,0.0,0.0,0.4077039840000001,0.427182982,0.387615655,0.384274212,0.378935668,0.40699325399999997,0.410037846,0.3823326,0.40359357,0.418150153,0.0,0.376801544,0.41516419299999996,0.383010711,0.405439633,0.416855764,0.398557574,0.402705878,0.362954642,0.41917047400000007,0.395373411,0.355817866,0.38266111999999997,0.396526954,0.0,0.0,0.40611618,0.389797403,0.387368861,0.40039110400000005,0.373723154,0.38917562400000005,0.393949008,0.0,0.39183995600000004,0.42865996500000003,0.381374104,0.40622161000000007,0.389240427,0.39699313999999997,0.430831745,0.377076963,0.39507426100000004,0.42894823800000004,0.420016422,0.43040373899999995,0.39614620399999995,0.39151023799999995,0.401453322,0.4306125,0.390273738,0.404593469,0.38961985099999996,0.453680965,0.0,0.38980472699999996,0.412845581,0.40985032900000007,0.431907512,0.416973339,0.425082422,0.39423905000000004,0.409429261,0.365639926,0.42047451999999996,0.407887118,0.408701755,0.41223677799999997,0.38919753700000004,0.40674667600000003,0.0,0.40130154,0.407979512,0.0,0.3846117929999999,0.387731749,0.41627858500000003,0.37365116600000003,0.441367853,0.0,0.370977015,0.398214743,0.417241235,0.0,0.38492505000000005,0.38263666,0.38654401199999994,0.391095329,0.40030324,0.0,0.377891455,0.40328851,0.399517576,0.398921188,0.39583601599999996,0.41904610400000003,0.0,0.395977127,0.392197906,0.0,0.397076411,0.41823316499999996,0.41441017399999996,0.423458194,0.3974050
38,0.414039986,0.38350320299999996,0.0,0.37732489199999997,0.419838595,0.0,0.38560371699999996,0.400518039,0.38469378,0.395110635,0.408267448,0.40848782800000005,0.423601938,0.4058402,0.0,0.0,0.412561555,0.39349129199999994,0.0,0.417378878,0.385570402,0.39260660199999997,0.420295388,0.417262263,0.412199962,0.0,0.4121228410000001,0.0,0.38976375199999996,0.40044358700000005,0.407245389,0.425683258,0.413051708,0.0,0.40641413099999996,0.402914117,0.399316939,0.38110010800000005,0.41176637600000005,0.390101005,0.408386935,0.379676556,0.0,0.416392274\n"
]
]
] |
[
"code"
] |
[
[
"code",
"code"
]
] |
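
The record above closes with per-node series for two partition-quality metrics, `CONDUCTANCE_PM` and `MAXPERM_PM`, evaluated on something labelled `largest_node_weight_complete`. As a hedged illustration only — the graph `G` and the node block `S` below are placeholders, not data from that notebook — conductance of a cut can be computed with networkx:

```python
# Minimal sketch: conductance of one block of a node partition.
# G and S are hypothetical stand-ins; the record's actual graph is not available here.
import networkx as nx

G = nx.karate_club_graph()             # placeholder graph
S = {n for n in G.nodes if n < 17}     # placeholder block of the partition

# conductance(S) = cut_size(S, complement) / min(volume(S), volume(complement))
cond = nx.conductance(G, S)
print(f"conductance of S: {cond:.4f}")
```

The scattered 0.0 entries in the series above would be consistent with blocks that have no cut edges (or are empty), though that reading is an inference rather than something stated in the record.
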
4a6542987f17bedf28a780d98bff8917041e62ad
| 40,880 |
ipynb
|
Jupyter Notebook
|
notebook/2_PowerSystems_examples/01_PowerSystems_intro.ipynb
|
kdayday/SIIPExamples.jl
|
5d7ca49274e242848845cec5e0bc881a207b6072
|
[
"BSD-3-Clause"
] | null | null | null |
notebook/2_PowerSystems_examples/01_PowerSystems_intro.ipynb
|
kdayday/SIIPExamples.jl
|
5d7ca49274e242848845cec5e0bc881a207b6072
|
[
"BSD-3-Clause"
] | null | null | null |
notebook/2_PowerSystems_examples/01_PowerSystems_intro.ipynb
|
kdayday/SIIPExamples.jl
|
5d7ca49274e242848845cec5e0bc881a207b6072
|
[
"BSD-3-Clause"
] | null | null | null | 42.583333 | 2,249 | 0.541756 |
[
[
[
"empty"
]
]
] |
[
"empty"
] |
[
[
"empty"
]
] |
4a6542e1a8ffb81f92c4b4ccb82db56639be8f98
| 23,949 |
ipynb
|
Jupyter Notebook
|
scheduled_bots/obo/notebooks/Single OBO Term bot.ipynb
|
stuppie/scheduled-bots
|
c699629d3ff6ed515d54e143d3940d79c12cbb81
|
[
"MIT"
] | null | null | null |
scheduled_bots/obo/notebooks/Single OBO Term bot.ipynb
|
stuppie/scheduled-bots
|
c699629d3ff6ed515d54e143d3940d79c12cbb81
|
[
"MIT"
] | 5 |
2016-11-25T03:18:44.000Z
|
2017-01-10T19:07:09.000Z
|
scheduled_bots/obo/notebooks/Single OBO Term bot.ipynb
|
stuppie/scheduled-bots
|
c699629d3ff6ed515d54e143d3940d79c12cbb81
|
[
"MIT"
] | null | null | null | 42.462766 | 2,420 | 0.507913 |
[
[
[
"%%capture\n!pip install wikidataintegrator",
"_____no_output_____"
],
[
"from rdflib import Graph, URIRef\nfrom wikidataintegrator import wdi_core, wdi_login\nfrom datetime import datetime\nimport copy\nimport pandas as pd\nimport getpass",
"_____no_output_____"
],
[
"print(\"username:\")\nusername = input()\nprint(\"password:\")\npassword = getpass.getpass()\nlogin = wdi_login.WDLogin(user=username, pwd=password)",
"username:\nandrawaag\npassword:\n········\n"
],
[
"# functions\ndef createOBOReference(doid):\n statedin = wdi_core.WDItemID(obowditem, prop_nr=\"P248\", is_reference=True)\n retrieved = datetime.now()\n timeStringNow = retrieved.strftime(\"+%Y-%m-%dT00:00:00Z\")\n refRetrieved = wdi_core.WDTime(timeStringNow, prop_nr=\"P813\", is_reference=True)\n id = wdi_core.WDExternalID(oboid, prop_nr=oboidwdprop, is_reference=True)\n return [statedin, refRetrieved, id]",
"_____no_output_____"
],
[
"query = \"\"\"\n\nSELECT * WHERE {\n ?ontology rdfs:label ?ontologyLabel ;\n wdt:P361 wd:Q4117183 ;\n wdt:P1687 ?wdprop .\n OPTIONAL {?ontology wdt:P1813 ?shortname .}\n \n OPTIONAL {?wdprop wdt:P1630 ?formatterURL .}\n FILTER (lang(?ontologyLabel) = \"en\")\n}\n\"\"\"\nwdmappings = wdi_core.WDFunctionsEngine.execute_sparql_query(query, as_dataframe=True)",
"_____no_output_____"
],
[
"oboid = \"SO:0000110\"\nobouri = \"http://purl.obolibrary.org/obo/\"+oboid.replace(\":\", \"_\")\noboontology = oboid.split(\":\")[0]\n\n## Fetch the OBO ontology\nobog = Graph()\nobog.parse(f\"http://www.ontobee.org/ontology/rdf/{oboontology}?iri=\"+obouri, format=\"xml\")",
"_____no_output_____"
],
[
"oboqid = wdmappings[wdmappings[\"shortname\"]==oboid.split(\":\")[0]][\"ontology\"].iloc[0].replace(\"http://www.wikidata.org/entity/\", \"\")\nwdmappings",
"_____no_output_____"
],
[
"# wikidata\nobowditem = wdmappings[wdmappings[\"shortname\"]==oboid.split(\":\")[0]][\"ontology\"].iloc[0].replace(\"http://www.wikidata.org/entity/\", \"\")\noboidwdprop =wdmappings[wdmappings[\"shortname\"]==oboid.split(\":\")[0]][\"wdprop\"].iloc[0].replace(\"http://www.wikidata.org/entity/\", \"\") #gene ontology id\n\n\n## Fetch Wikidata part of the OBO ontology\nquery = f\"\"\"\nSELECT * WHERE {{?item wdt:{oboidwdprop} '{oboid}'}}\n\"\"\"\nqid = wdi_core.WDFunctionsEngine.execute_sparql_query(query, as_dataframe=True)\nif len(qid) >0:\n qid = qid.iloc[0][\"item\"].replace(\"http://www.wikidata.org/entity/\", \"\")\nelse:\n qid = None\n\n# Bot\n## ShEx precheck\n\nif qid:\n item = wdi_core.WDItemEngine(wd_item_id=qid) \n # precheck = item.check_entity_schema(eid=\"E323\", output=\"result\")\n #if not precheck[\"result\"]:\n # print(qid + \" needs fixing to conform to E323\")\n # quit()\nprint(\"continue\")\n\nobo_reference = createOBOReference(oboid)\n\n# Statements build up\n## OBO ontology generic\nstatements = []\n# OBO ID \nstatements.append(wdi_core.WDString(value=oboid, prop_nr=oboidwdprop, references=[copy.deepcopy(obo_reference)]))\n# exact match (P2888)\nstatements.append(wdi_core.WDUrl(value=obouri, prop_nr=\"P2888\", references=[copy.deepcopy(obo_reference)]))\n\n## OBO resource specific \n### Gene Ontology\ngotypes = {\"biological_process\": \"Q2996394\", \n \"molecular_function\": \"Q14860489\", \n \"cellular_component\": \"Q5058355\",\n}\n\nfor gotype in obog.objects(predicate=URIRef(\"http://www.geneontology.org/formats/oboInOwl#hasOBONamespace\")):\n statements.append(wdi_core.WDItemID(gotypes[str(gotype)], prop_nr=\"P31\", references=[copy.deepcopy(obo_reference)]))\n\n#external identifiers based on skos:exactMatch\nfor extID in obog.objects(predicate=URIRef(\"http://www.w3.org/2004/02/skos/core#exactMatch\")):\n # if \"MESH:\" in extID:\n # statements.append(wdi_core.WDExternalID(row[\"exactMatch\"].replace(\"MESH:\", \"\"), prop_nr=\"P486\", references=[copy.deepcopy(do_reference)]))\n if \"NCI:\" in extID:\n statements.append(wdi_core.WDExternalID(row[\"exactMatch\"], prop_nr=\"P1748\", references=[copy.deepcopy(do_reference)]))\n if \"ICD10CM:\" in extID:\n statements.append(wdi_core.WDExternalID(row[\"exactMatch\"], prop_nr=\"P4229\", references=[copy.deepcopy(do_reference)]))\n if \"UMLS_CUI:\" in extID:\n statements.append(wdi_core.WDExternalID(row[\"exactMatch\"], prop_nr=\"P2892\", references=[copy.deepcopy(do_reference)]))\nitem = wdi_core.WDItemEngine(data=statements, keep_good_ref_statements=True)\nprint(item.write(login))\n",
"continue\n"
],
[
"bloeb = Graph()\nuri = bloeb.parse(\"http://www.ontobee.org/ontology/rdf/SO?iri=http://purl.obolibrary.org/obo/SO_0001565\", format=\"xml\")",
"_____no_output_____"
],
[
"print(bloeb.serialize(format=\"turtle\"))",
"@prefix ns3: <http://purl.obolibrary.org/obo/> .\n@prefix oboInOwl: <http://www.geneontology.org/formats/oboInOwl#> .\n@prefix owl: <http://www.w3.org/2002/07/owl#> .\n@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .\n\nns3:IAO_0000115 a owl:AnnotationProperty .\n\nns3:SO_0001565 a owl:Class ;\n rdfs:label \"gene_fusion\"^^xsd:string ;\n ns3:IAO_0000115 \"A sequence variant whereby a two genes have become joined.\"^^xsd:string ;\n oboInOwl:created_by \"kareneilbeck\"^^xsd:string ;\n oboInOwl:creation_date \"2010-03-22T02:28:28Z\"^^xsd:string ;\n oboInOwl:hasExactSynonym \"gene fusion\"^^xsd:string ;\n oboInOwl:hasOBONamespace \"sequence\"^^xsd:string ;\n oboInOwl:id \"SO:0001565\"^^xsd:string ;\n rdfs:subClassOf ns3:SO_0001564,\n ns3:SO_0001882 .\n\noboInOwl:created_by a owl:AnnotationProperty .\n\noboInOwl:creation_date a owl:AnnotationProperty .\n\noboInOwl:hasExactSynonym a owl:AnnotationProperty .\n\noboInOwl:hasOBONamespace a owl:AnnotationProperty .\n\noboInOwl:id a owl:AnnotationProperty .\n\nns3:SO_0001564 a owl:Class ;\n rdfs:label \"gene_variant\"^^xsd:string .\n\nns3:SO_0001882 a owl:Class ;\n rdfs:label \"feature_fusion\"^^xsd:string .\n\n\n"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
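
A note on the `Single OBO Term bot` record above: in the `skos:exactMatch` loop of the main bot cell, the NCI/ICD10CM/UMLS branches reference `row["exactMatch"]` and `do_reference`, neither of which is defined in that cell (the surrounding code defines `extID` and `obo_reference`), so those branches would raise a `NameError` if reached. Below is a hedged correction sketch of just that loop — not the author's exact code. The property numbers are copied from the original cell; stripping the `NCI:` / `ICD10CM:` / `UMLS_CUI:` prefixes mirrors the commented-out MESH branch and is an assumption. `obog`, `statements`, and `obo_reference` are taken from earlier in that cell.

```python
# Hedged fix sketch for the exactMatch loop (assumes obog, statements and
# obo_reference exist as defined earlier in the cell).
import copy
from rdflib import URIRef
from wikidataintegrator import wdi_core

for extID in obog.objects(predicate=URIRef("http://www.w3.org/2004/02/skos/core#exactMatch")):
    ext = str(extID)  # use the loop variable, not an undefined `row`
    if "NCI:" in ext:
        statements.append(wdi_core.WDExternalID(ext.replace("NCI:", ""), prop_nr="P1748",
                                                references=[copy.deepcopy(obo_reference)]))
    if "ICD10CM:" in ext:
        statements.append(wdi_core.WDExternalID(ext.replace("ICD10CM:", ""), prop_nr="P4229",
                                                references=[copy.deepcopy(obo_reference)]))
    if "UMLS_CUI:" in ext:
        statements.append(wdi_core.WDExternalID(ext.replace("UMLS_CUI:", ""), prop_nr="P2892",
                                                references=[copy.deepcopy(obo_reference)]))
```
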
4a654f288b82d9229bc4fb6d55b239c3098900bd
| 150,047 |
ipynb
|
Jupyter Notebook
|
code/notebooks/bsnip001-Copy1_new.ipynb
|
zeroknowledgediscovery/zcad
|
5642a7ab0ac29337a4066305091811032ab9032b
|
[
"MIT"
] | null | null | null |
code/notebooks/bsnip001-Copy1_new.ipynb
|
zeroknowledgediscovery/zcad
|
5642a7ab0ac29337a4066305091811032ab9032b
|
[
"MIT"
] | null | null | null |
code/notebooks/bsnip001-Copy1_new.ipynb
|
zeroknowledgediscovery/zcad
|
5642a7ab0ac29337a4066305091811032ab9032b
|
[
"MIT"
] | null | null | null | 191.142675 | 43,052 | 0.880164 |
[
[
[
"import numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn import svm\nimport pandas as pd\nimport seaborn as sns\nfrom sklearn import svm\nfrom sklearn.model_selection import train_test_split\nfrom sklearn import metrics\nfrom sklearn import neighbors, datasets\nfrom sklearn.model_selection import cross_val_score\nfrom sklearn.datasets import make_blobs\nfrom sklearn.ensemble import RandomForestClassifier\nfrom sklearn.ensemble import ExtraTreesClassifier\nfrom sklearn.tree import DecisionTreeClassifier\nfrom sklearn.ensemble import AdaBoostClassifier\nfrom sklearn.ensemble import GradientBoostingClassifier\nfrom scipy.spatial import ConvexHull\nfrom tqdm import tqdm\nimport random\nplt.style.use('ggplot')\nimport pickle\nfrom sklearn import tree\nfrom sklearn.tree import export_graphviz\nfrom joblib import dump, load\nfrom sklearn.metrics import confusion_matrix\nfrom sklearn.metrics import accuracy_score\nfrom sklearn.metrics import classification_report\n%matplotlib inline\nfrom sklearn.impute import SimpleImputer",
"_____no_output_____"
],
[
"def getAuc(X,y,test_size=0.25,max_depth=None,n_estimators=100,\n minsplit=4,FPR=[],TPR=[],VERBOSE=False, USE_ONLY=None):\n '''\n get AUC given training data X, with target labels y\n '''\n X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=test_size)\n CLASSIFIERS=[DecisionTreeClassifier(max_depth=max_depth, min_samples_split=minsplit,class_weight='balanced'),\n RandomForestClassifier(n_estimators=n_estimators,\n max_depth=max_depth,min_samples_split=minsplit,class_weight='balanced'),\n ExtraTreesClassifier(n_estimators=n_estimators,\n max_depth=max_depth,min_samples_split=minsplit,class_weight='balanced'),\n AdaBoostClassifier(n_estimators=n_estimators),\n GradientBoostingClassifier(n_estimators=n_estimators,max_depth=max_depth),\n svm.SVC(kernel='rbf',gamma='scale',class_weight='balanced',probability=True)]\n\n if USE_ONLY is not None:\n if isinstance(USE_ONLY, (list,)):\n CLASSIFIERS=[CLASSIFIERS[i] for i in USE_ONLY]\n if isinstance(USE_ONLY, (int,)):\n CLASSIFIERS=CLASSIFIERS[USE_ONLY]\n\n for clf in CLASSIFIERS:\n clf.fit(X_train,y_train)\n y_pred=clf.predict_proba(X_test)\n #print(X_test,y_pred)\n fpr, tpr, thresholds = metrics.roc_curve(y_test,y_pred[:,1], pos_label=1)\n auc=metrics.auc(fpr, tpr)\n if VERBOSE:\n print(auc)\n\n FPR=np.append(FPR,fpr)\n TPR=np.append(TPR,tpr)\n points=np.array([[a[0],a[1]] for a in zip(FPR,TPR)])\n hull = ConvexHull(points)\n x=np.argsort(points[hull.vertices,:][:,0])\n auc=metrics.auc(points[hull.vertices,:][x,0],points[hull.vertices,:][x,1])\n return auc,CLASSIFIERS\n\n\ndef saveFIG(filename='tmp.pdf',AXIS=False):\n '''\n save fig for publication\n '''\n import pylab as plt\n plt.subplots_adjust(top = 1, bottom = 0, right = 1, left = 0, \n hspace = 0, wspace = 0)\n plt.margins(0,0)\n if not AXIS:\n plt.gca().xaxis.set_major_locator(plt.NullLocator())\n plt.gca().yaxis.set_major_locator(plt.NullLocator())\n plt.savefig(filename,dpi=300, bbox_inches = 'tight',\n pad_inches = 0,transparent=False) \n return",
"_____no_output_____"
],
[
"def getCoverage(model,verbose=True):\n '''\n return how many distinct items (questions)\n are used in the model set.\n This includes the set of questions being\n covered by all forms that may be \n generated by the model set\n '''\n FS=[]\n for m in model:\n for count in range(len(m.estimators_)):\n clf=m.estimators_[count]\n fs=clf.tree_.feature[clf.tree_.feature>0]\n FS=np.array(list(set(np.append(FS,fs))))\n if verbose:\n print(\"Number of items used: \", FS.size)\n return FS\n\ndef getConfusion(X,y,test_size=0.25,max_depth=None,n_estimators=100,\n minsplit=4,CONFUSION={},VERBOSE=False, USE_ONLY=None,target_names = None):\n '''\n get AUC given training data X, with target labels y\n '''\n X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=test_size)\n CLASSIFIERS=[DecisionTreeClassifier(max_depth=max_depth, min_samples_split=minsplit),\n RandomForestClassifier(n_estimators=n_estimators,class_weight='balanced',\n max_depth=max_depth,min_samples_split=minsplit),\n ExtraTreesClassifier(n_estimators=n_estimators,class_weight='balanced',\n max_depth=max_depth,min_samples_split=minsplit),\n AdaBoostClassifier(n_estimators=n_estimators),\n GradientBoostingClassifier(n_estimators=n_estimators,max_depth=max_depth),\n svm.SVC(kernel='rbf',gamma='scale',class_weight='balanced',probability=True)]\n\n if USE_ONLY is not None:\n if isinstance(USE_ONLY, (list,)):\n CLASSIFIERS=[CLASSIFIERS[i] for i in USE_ONLY]\n if isinstance(USE_ONLY, (int,)):\n CLASSIFIERS=CLASSIFIERS[USE_ONLY]\n\n for clf in CLASSIFIERS:\n clf.fit(X_train,y_train)\n y_pred=clf.predict(X_test)\n print(y_test,y_pred)\n cmat=confusion_matrix(y_test, y_pred)\n acc=accuracy_score(y_test, y_pred)\n \n CONFUSION[clf]=cmat\n \n if VERBOSE:\n print(classification_report(y_test, y_pred, target_names=target_names))\n print('Confusion MAtrix:\\n', cmat)\n print(' ')\n print('Accuracy:', acc)\n\n \n return CONFUSION,acc",
"_____no_output_____"
],
[
"df=pd.read_csv('combined_bsnip.csv',index_col=0).drop('DSM',axis=1)\ndf.head()",
"_____no_output_____"
],
[
"df.Biotype.value_counts()\n# 3 is HC",
"_____no_output_____"
],
[
"#df=df[df['Biotype']==3]\ndf=df.dropna()\ndf0=df",
"_____no_output_____"
],
[
"#df=df0[df0.Biotype.isin([1,5])]\ndf=df0\nX=df.iloc[:,2:].values\ny=df.Biotype.values#.astype(str)\ny=[(int(x)==2)+0 for x in y ]",
"_____no_output_____"
],
[
"ACC=[]\nCLFh={}\nfor run in tqdm(np.arange(500)):\n auc,CLFS=getAuc(X,y,test_size=0.2,max_depth=10,n_estimators=2,\n minsplit=2,VERBOSE=False, USE_ONLY=[2])\n ACC=np.append(ACC,auc)\n if auc > 0.5:\n CLFh[auc]=CLFS\nsns.distplot(ACC)\nnp.median(ACC)",
"100%|██████████| 500/500 [00:04<00:00, 120.15it/s]\n/home/ishanu/.local/lib/python3.7/site-packages/seaborn/distributions.py:2557: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms).\n warnings.warn(msg, FutureWarning)\n/home/ishanu/.local/lib/python3.7/site-packages/matplotlib/cbook/__init__.py:1402: FutureWarning: Support for multi-dimensional indexing (e.g. `obj[:, None]`) is deprecated and will be removed in a future version. Convert to a numpy array before indexing instead.\n ndim = x[:, None].ndim\n/home/ishanu/.local/lib/python3.7/site-packages/matplotlib/axes/_base.py:276: FutureWarning: Support for multi-dimensional indexing (e.g. `obj[:, None]`) is deprecated and will be removed in a future version. Convert to a numpy array before indexing instead.\n x = x[:, np.newaxis]\n/home/ishanu/.local/lib/python3.7/site-packages/matplotlib/axes/_base.py:278: FutureWarning: Support for multi-dimensional indexing (e.g. `obj[:, None]`) is deprecated and will be removed in a future version. Convert to a numpy array before indexing instead.\n y = y[:, np.newaxis]\n"
],
[
"CLFstar=CLFh[np.array([k for k in CLFh.keys()]).max()][0]\n",
"_____no_output_____"
],
[
"CLFstar",
"_____no_output_____"
],
[
"from scipy import interpolate\nfrom scipy.interpolate import interp1d\nauc_=[]\nROC={}\nfpr_ = np.linspace(0, 1, num=20, endpoint=True)\nfor run in np.arange(1000):\n clf=CLFstar\n X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5)\n y_pred=clf.predict_proba(X_test)\n fpr, tpr, thresholds = metrics.roc_curve(y_test,y_pred[:,1], pos_label=1)\n f = interp1d(fpr, tpr)\n auc_=np.append(auc_,metrics.auc(fpr_, f(fpr_)))\n ROC[metrics.auc(fpr, tpr)]={'fpr':fpr_,'tpr':f(fpr_)}\nsns.distplot(auc_)\nauc_.mean()",
"/home/ishanu/.local/lib/python3.7/site-packages/seaborn/distributions.py:2557: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms).\n warnings.warn(msg, FutureWarning)\n/home/ishanu/.local/lib/python3.7/site-packages/matplotlib/cbook/__init__.py:1402: FutureWarning: Support for multi-dimensional indexing (e.g. `obj[:, None]`) is deprecated and will be removed in a future version. Convert to a numpy array before indexing instead.\n ndim = x[:, None].ndim\n/home/ishanu/.local/lib/python3.7/site-packages/matplotlib/axes/_base.py:276: FutureWarning: Support for multi-dimensional indexing (e.g. `obj[:, None]`) is deprecated and will be removed in a future version. Convert to a numpy array before indexing instead.\n x = x[:, np.newaxis]\n/home/ishanu/.local/lib/python3.7/site-packages/matplotlib/axes/_base.py:278: FutureWarning: Support for multi-dimensional indexing (e.g. `obj[:, None]`) is deprecated and will be removed in a future version. Convert to a numpy array before indexing instead.\n y = y[:, np.newaxis]\n"
],
[
"TPR=[]\nfor a in ROC.keys():\n #print(a)\n #break\n plt.plot(ROC[a]['fpr'],ROC[a]['tpr'],'-k',alpha=.05)\n TPR=np.append(TPR,ROC[a]['tpr'])\nTPR=TPR.reshape(int(len(TPR)/len(fpr_)),len(fpr_))\nplt.plot(fpr_,np.median(TPR,axis=0),'-r')\nmetrics.auc(fpr_,np.median(TPR,axis=0))\nplt.gca().set_title('B2 vs others',fontsize=20)\nplt.text(.6,.65,'AUC: '+str(metrics.auc(fpr_,np.median(TPR,axis=0)))[:5],color='r',fontsize=20)\n#plt.text(.6,.31,'AUC: '+str(metrics.auc(fpr_,np.median(tprA,axis=0)))[:5],color='b')\n#plt.text(.6,.19,'AUC: '+str(metrics.auc(fpr_,np.median(tprB,axis=0)))[:5],color='g')\nplt.gca().set_xlabel('1-specificity',fontsize=20)\nplt.gca().set_ylabel('sensitivity',fontsize=20)\nplt.gca().xaxis.set_tick_params(labelsize=20)\nplt.gca().yaxis.set_tick_params(labelsize=20)\n\nsaveFIG('bsnip001_updated_L2.pdf',AXIS=True)",
"_____no_output_____"
]
],
[
[
"# feature importance analysis",
"_____no_output_____"
]
],
[
[
"fig=plt.figure(figsize=[8,6])\nIMP=[]\nfor e in CLFstar.estimators_:\n IMP.append(e.feature_importances_)\nlen(IMP)\n\nN=15\nI15=pd.DataFrame(IMP).sum()\nI15=I15/I15.max()\nI15=I15.sort_values(ascending=False).head(N)\nax=I15.plot(kind='bar',ax=plt.gca())\nax.set_ylim(0.3,None)\n\nplt.gca().set_xlabel('Item ID',fontsize=20)\nplt.gca().set_ylabel('Normalized Feature Importance',fontsize=20)\nplt.gca().xaxis.set_tick_params(labelsize=20)\nplt.gca().yaxis.set_tick_params(labelsize=20)\nplt.gca().set_title('B2 vs others',fontsize=20)\n\nsaveFIG('updated_bsnip001_L2_importances.pdf',AXIS=True)",
"_____no_output_____"
],
[
"a=list(CLFh.keys())#.sort()\na.sort()\ntop_keys=a[-3:]",
"_____no_output_____"
],
[
"CLFh[top_keys[0]]",
"_____no_output_____"
],
[
"fig=plt.figure(figsize=[8,6])\nIMP=[]\n\nfor k in top_keys:\n for e in CLFh[k][0].estimators_:\n IMP.append(e.feature_importances_)\nlen(IMP)\n\nN=15\nI15=pd.DataFrame(IMP).sum()\nI15=I15/I15.max()\nI15=I15.sort_values(ascending=False).head(N)\nax=I15.plot(kind='bar',ax=plt.gca())\nax.set_ylim(0.3,None)\n\nplt.gca().set_xlabel('Item ID',fontsize=20)\nplt.gca().set_ylabel('Normalized Feature Importance',fontsize=20)\nplt.gca().xaxis.set_tick_params(labelsize=20)\nplt.gca().yaxis.set_tick_params(labelsize=20)\nplt.gca().set_title('B2 vs others',fontsize=20)\n\nsaveFIG('updated_bsnip001_L2_importances.pdf',AXIS=True)",
"_____no_output_____"
]
]
] |
[
"code",
"markdown",
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
]
] |
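
The bsnip record above aggregates many random train/test splits by interpolating each run's ROC onto a fixed FPR grid and plotting the pointwise median curve (see the `interp1d` cell). The sketch below reproduces that aggregation idea on synthetic data; the dataset, classifier settings, and number of repeats are placeholders, not the study's configuration.

```python
# Minimal sketch of the median-ROC aggregation used in the notebook,
# on synthetic data rather than the BSNIP features.
import numpy as np
from scipy.interpolate import interp1d
from sklearn.datasets import make_classification
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.model_selection import train_test_split
from sklearn import metrics

X, y = make_classification(n_samples=400, n_features=20, random_state=0)  # placeholder data
fpr_grid = np.linspace(0, 1, 20)   # common FPR grid, as in the notebook
tprs = []

for seed in range(50):             # placeholder number of repeats
    X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.5, random_state=seed)
    clf = ExtraTreesClassifier(n_estimators=10, random_state=seed).fit(X_tr, y_tr)
    fpr, tpr, _ = metrics.roc_curve(y_te, clf.predict_proba(X_te)[:, 1])
    tprs.append(interp1d(fpr, tpr)(fpr_grid))   # resample each ROC onto the grid

median_tpr = np.median(np.vstack(tprs), axis=0)
print("median-curve AUC:", metrics.auc(fpr_grid, median_tpr))
```

This works because `roc_curve` always returns FPR values spanning 0 to 1, so the linear interpolation covers the whole grid; the median curve then summarizes the spread of the individual bootstrap ROCs, which is what the red curve in the notebook's figure shows.
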
4a6553fdeabd2a3a14670e021c45e43c8713e95b
| 16,485 |
ipynb
|
Jupyter Notebook
|
test.ipynb
|
reckless129/PointNet_Custom_Object_Detection
|
1603081af8eaf612cbc6ac13f66c27773a79f534
|
[
"MIT"
] | null | null | null |
test.ipynb
|
reckless129/PointNet_Custom_Object_Detection
|
1603081af8eaf612cbc6ac13f66c27773a79f534
|
[
"MIT"
] | null | null | null |
test.ipynb
|
reckless129/PointNet_Custom_Object_Detection
|
1603081af8eaf612cbc6ac13f66c27773a79f534
|
[
"MIT"
] | null | null | null | 35.451613 | 275 | 0.533273 |
[
[
[
"import sys\nsys.executable",
"_____no_output_____"
],
[
"import argparse\nimport math\nimport h5py\nimport numpy as np\nimport tensorflow as tf\nimport socket\nimport glob\n\nimport os\nimport sys\n\nimport h5py\nimport provider\nimport tf_util\nfrom model import *\nfrom plyfile import PlyData, PlyElement\nprint(\"success\")\n",
"success\n"
],
[
"BATCH_SIZE = 1\nBATCH_SIZE_EVAL = 1\nNUM_POINT = 4096\nMAX_EPOCH = 50\nBASE_LEARNING_RATE = 0.001\nGPU_INDEX = 0\nMOMENTUM = 0.9\nOPTIMIZER = 'adam'\nDECAY_STEP = 300000\nDECAY_RATE = 0.5\n\nLOG_DIR = 'log'\nif not os.path.exists(LOG_DIR): os.mkdir(LOG_DIR)\nos.system('cp model.py %s' % (LOG_DIR)) # bkp of model def\n#os.system('cp train.py %s' % (LOG_DIR)) # bkp of train procedure\nLOG_FOUT = open(os.path.join(LOG_DIR, 'log_train.txt'), 'w')\n# LOG_FOUT.write(str(FLAGS)+'\\n')\n\nMAX_NUM_POINT = 4096\nNUM_CLASSES = 2\n\nBN_INIT_DECAY = 0.5\nBN_DECAY_DECAY_RATE = 0.5\n#BN_DECAY_DECAY_STEP = float(DECAY_STEP * 2)\nBN_DECAY_DECAY_STEP = float(DECAY_STEP)\nBN_DECAY_CLIP = 0.99\n\nHOSTNAME = socket.gethostname()",
"_____no_output_____"
],
[
"# Load ALL data\nf = h5py.File('data/test.h5')\nprint(f)\n\n#Choose a frame to test, (0,60)\nframe_to_test = 15\n\n\ntest_data = np.zeros((4096, 6))\ntest_label = np.ones((1,4096))\n\nxmax = 3.0\nxmin = -3.0\n\ndata = f['data']\nlabel = f['label']\ntest_data[:,0:3] = (data[frame_to_test][:, 0:3]- xmin) / (xmax - xmin )\ntest_data[:,3:6] = data[frame_to_test][:, 3:6]\ntest_label[:,:] = label[frame_to_test][:]\n\n \nprint(test_data.shape)\nprint(test_label.shape)",
"<HDF5 file \"test.h5\" (mode r)>\n(4096, 6)\n(1, 4096)\n"
],
[
"features = [\"x\",\"y\",\"z\",\"r\",\"g\",\"b\"]\nfor i in range(6): \n print(features[i] + \"_range :\", np.min(test_data[:, i]), np.max(test_data[:, i]))",
"x_range : 0.33921632170677185 0.5573357939720154\ny_range : 0.4437101483345032 0.6659368872642517\nz_range : 0.574175680677096 0.7316666841506958\nr_range : 0.0 0.003921568859368563\ng_range : 0.0001845444057835266 0.003921568859368563\nb_range : 0.0 0.0039061899296939373\n"
],
[
"test_data_min = []\ntest_data_max = []\nfor i in range(6):\n test_data_min.append(np.min(test_data[:,i]))\n test_data_max.append(np.max(test_data[:,i]))\n \nprint(test_data_min)\nprint(test_data_max)",
"[0.33921632170677185, 0.4437101483345032, 0.574175680677096, 0.0, 0.0001845444057835266, 0.0]\n[0.5573357939720154, 0.6659368872642517, 0.7316666841506958, 0.003921568859368563, 0.003921568859368563, 0.0039061899296939373]\n"
],
[
"features = [\"x\",\"y\",\"z\",\"r\",\"g\",\"b\"]\nfor i in range(6): \n print(features[i] + \"_range :\", np.min(test_data[:, i]), np.max(test_data[:, i]))",
"x_range : 0.33921632170677185 0.5573357939720154\ny_range : 0.4437101483345032 0.6659368872642517\nz_range : 0.574175680677096 0.7316666841506958\nr_range : 0.0 0.003921568859368563\ng_range : 0.0001845444057835266 0.003921568859368563\nb_range : 0.0 0.0039061899296939373\n"
],
[
"\ndef log_string(out_str):\n LOG_FOUT.write(out_str+'\\n')\n LOG_FOUT.flush()\n print(out_str)\n\n\ndef get_learning_rate(batch):\n learning_rate = tf.train.exponential_decay(\n BASE_LEARNING_RATE, # Base learning rate.\n batch * BATCH_SIZE, # Current index into the dataset.\n DECAY_STEP, # Decay step.\n DECAY_RATE, # Decay rate.\n staircase=True)\n learning_rate = tf.maximum(learning_rate, 0.00001) # CLIP THE LEARNING RATE!!\n return learning_rate \n\ndef get_bn_decay(batch):\n bn_momentum = tf.train.exponential_decay(\n BN_INIT_DECAY,\n batch*BATCH_SIZE,\n BN_DECAY_DECAY_STEP,\n BN_DECAY_DECAY_RATE,\n staircase=True)\n bn_decay = tf.minimum(BN_DECAY_CLIP, 1 - bn_momentum)\n return bn_decay\n\ndef evaluate():\n with tf.Graph().as_default():\n with tf.device('/gpu:'+str(GPU_INDEX)):\n pointclouds_pl, labels_pl = placeholder_inputs(BATCH_SIZE, NUM_POINT)\n is_training_pl = tf.placeholder(tf.bool, shape=())\n \n # Note the global_step=batch parameter to minimize. \n # That tells the optimizer to helpfully increment the 'batch' parameter for you every time it trains.\n batch = tf.Variable(0)\n bn_decay = get_bn_decay(batch)\n tf.summary.scalar('bn_decay', bn_decay)\n\n # Get model and loss \n pred = get_model(pointclouds_pl, is_training_pl, bn_decay=bn_decay)\n loss = get_loss(pred, labels_pl)\n tf.summary.scalar('loss', loss)\n learning_rate = get_learning_rate(batch)\n\n if OPTIMIZER == 'momentum':\n optimizer = tf.train.MomentumOptimizer(learning_rate, momentum=MOMENTUM)\n elif OPTIMIZER == 'adam':\n optimizer = tf.train.AdamOptimizer(learning_rate)\n train_op = optimizer.minimize(loss, global_step=batch)\n \n # Add ops to save and restore all the variables.\n saver = tf.train.Saver()\n \n # Create a session\n config = tf.ConfigProto()\n config.gpu_options.allow_growth = True\n config.allow_soft_placement = True\n config.log_device_placement = True\n sess = tf.Session(config=config)\n merged = tf.summary.merge_all()\n\n \n ops = {'pointclouds_pl': pointclouds_pl,\n 'labels_pl': labels_pl,\n 'is_training_pl': is_training_pl,\n 'pred': pred,\n 'loss': loss,\n 'train_op': train_op,\n 'merged': merged,\n 'step': batch}\n MODEL_PATH = \"log/model.ckpt\"\n # Restore variables from disk.\n saver.restore(sess, MODEL_PATH)\n log_string(\"Model restored.\")\n \n test_writer = tf.summary.FileWriter(os.path.join(LOG_DIR, 'test'))\n\n eval_one_epoch(sess, ops, test_writer)\n\n\n \ndef eval_one_epoch(sess, ops, test_writer):\n \"\"\" ops: dict mapping from string to tf ops \"\"\"\n is_training = False\n total_correct = 0\n total_seen = 0\n loss_sum = 0\n total_seen_class = [0 for _ in range(NUM_CLASSES)]\n total_correct_class = [0 for _ in range(NUM_CLASSES)]\n \n log_string('----')\n \n #current_data = np.zeros((1,4096, 6))\n current_data = test_data[0:NUM_POINT,:]\n current_label = test_label\n \n current_data = current_data.reshape(1,4096, 6)\n \n file_size = current_data.shape[0]\n num_batches = file_size // BATCH_SIZE_EVAL \n \n fout = open('log/'+str(frame_to_test)+'_pred.obj', 'w')\n fout_gt = open('log/'+str(frame_to_test)+'_gt.obj', 'w') \n \n \n for batch_idx in range(num_batches):\n start_idx = batch_idx * BATCH_SIZE_EVAL\n end_idx = (batch_idx+1) * BATCH_SIZE_EVAL\n\n feed_dict = {ops['pointclouds_pl']: current_data[:, :],\n ops['labels_pl']: current_label[:],\n ops['is_training_pl']: is_training}\n summary, step, loss_val, pred_val = sess.run([ops['merged'], ops['step'], ops['loss'], ops['pred']],\n feed_dict=feed_dict)\n \n pred_label = np.argmax(pred_val, 2) # BxN\n \n 
test_writer.add_summary(summary, step)\n pred_val = np.argmax(pred_val, 2)\n correct = np.sum(pred_val == current_label[start_idx:end_idx])\n total_correct += correct\n total_seen += (BATCH_SIZE_EVAL*NUM_POINT)\n loss_sum += (loss_val*BATCH_SIZE_EVAL)\n class_color = [[0,255,0],[0,0,255]]\n print(start_idx, end_idx)\n \n for i in range(start_idx, end_idx):\n print(pred_label.shape)\n pred = pred_label[i-start_idx, :]\n \n pts = current_data[i-start_idx, :, :]\n l = current_label[i-start_idx,:]\n \n for j in range(NUM_POINT):\n l = int(current_label[i, j])\n total_seen_class[l] += 1\n total_correct_class[l] += (pred_val[i-start_idx, j] == l)\n \n color = class_color[pred_val[i-start_idx, j]]\n color_gt = class_color[l]\n \n fout.write('v %f %f %f %d %d %d\\n' % (pts[j,0], pts[j,1], pts[j,2], color[0], color[1], color[2]))\n fout_gt.write('v %f %f %f %d %d %d\\n' % (pts[j,0], pts[j,1], pts[j,2], color_gt[0], color_gt[1], color_gt[2]))\n \n log_string('eval mean loss: %f' % (loss_sum / float(total_seen/NUM_POINT)))\n log_string('eval accuracy: %f'% (total_correct / float(total_seen)))\n log_string('eval avg class acc: %f' % (np.mean(np.array(total_correct_class)/np.array(total_seen_class,dtype=np.float))))\n \n\nif __name__ == \"__main__\":\n evaluate()\n LOG_FOUT.close()",
"WARNING:tensorflow:From c:\\Project\\PointNet_Custom_Object_Detection\\model.py:13: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n\nWARNING:tensorflow:From c:\\Project\\PointNet_Custom_Object_Detection\\tf_util.py:145: The name tf.variable_scope is deprecated. Please use tf.compat.v1.variable_scope instead.\n\nWARNING:tensorflow:\nThe TensorFlow contrib module will not be included in TensorFlow 2.0.\nFor more information, please see:\n * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n * https://github.com/tensorflow/addons\n * https://github.com/tensorflow/io (for I/O related ops)\nIf you depend on functionality not listed there, please file an issue.\n\nWARNING:tensorflow:From c:\\Project\\PointNet_Custom_Object_Detection\\tf_util.py:21: The name tf.get_variable is deprecated. Please use tf.compat.v1.get_variable instead.\n\nWARNING:tensorflow:From c:\\Project\\PointNet_Custom_Object_Detection\\tf_util.py:48: The name tf.add_to_collection is deprecated. Please use tf.compat.v1.add_to_collection instead.\n\nWARNING:tensorflow:From c:\\Project\\PointNet_Custom_Object_Detection\\tf_util.py:368: The name tf.nn.max_pool is deprecated. Please use tf.nn.max_pool2d instead.\n\nTensor(\"fc2/Relu:0\", shape=(1, 128), dtype=float32, device=/device:GPU:0)\nWARNING:tensorflow:From c:\\Project\\PointNet_Custom_Object_Detection\\tf_util.py:573: calling dropout (from tensorflow.python.ops.nn_ops) with keep_prob is deprecated and will be removed in a future version.\nInstructions for updating:\nPlease use `rate` instead of `keep_prob`. Rate should be set to `rate = 1 - keep_prob`.\nWARNING:tensorflow:From C:\\Users\\wangp\\Anaconda3\\envs\\open3d\\lib\\site-packages\\tensorflow\\python\\training\\saver.py:1276: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\nInstructions for updating:\nUse standard file APIs to check for files with this prefix.\nINFO:tensorflow:Restoring parameters from log/model.ckpt\nModel restored.\n----\n0 1\n(1, 4096)\neval mean loss: 0.005112\neval accuracy: 0.998291\neval avg class acc: 0.998978\n"
],
[
"eval mean loss: 0.297671\neval accuracy: 0.840576\neval avg class acc: 0.707515",
"_____no_output_____"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a655b06f04b75926bf02737d938ce9835d56e55
| 555 |
ipynb
|
Jupyter Notebook
|
Test/LogisticRegression/Untitled.ipynb
|
SangeonPark/Bayesian
|
11ad043d22a003f7287d7164360f5caa34f056b1
|
[
"MIT"
] | 4 |
2020-06-27T02:53:35.000Z
|
2020-11-10T06:56:12.000Z
|
Test/LEAF/iid_FEMNIST.ipynb
|
a8252525/Pysyft_experiment
|
1e6c0f52067dc656a55f30b5785749cc5e151fac
|
[
"MIT"
] | 3 |
2021-03-19T13:53:32.000Z
|
2022-03-12T00:38:06.000Z
|
Test/LEAF/iid_FEMNIST.ipynb
|
a8252525/Pysyft_experiment
|
1e6c0f52067dc656a55f30b5785749cc5e151fac
|
[
"MIT"
] | 3 |
2020-05-11T08:30:01.000Z
|
2020-10-06T17:35:25.000Z
| 16.818182 | 34 | 0.526126 |
[] |
[] |
[] |
4a655cfddeefe1768c14235b36714f68a5951cb2
| 10,553 |
ipynb
|
Jupyter Notebook
|
src/pose_estimation/5_OpenPose_training_HomeWork.ipynb
|
makotovnjp/Talent5OpenPose
|
1ebbbd4f226b6839d7d1627d6c33edd416c137fc
|
[
"Apache-2.0"
] | null | null | null |
src/pose_estimation/5_OpenPose_training_HomeWork.ipynb
|
makotovnjp/Talent5OpenPose
|
1ebbbd4f226b6839d7d1627d6c33edd416c137fc
|
[
"Apache-2.0"
] | null | null | null |
src/pose_estimation/5_OpenPose_training_HomeWork.ipynb
|
makotovnjp/Talent5OpenPose
|
1ebbbd4f226b6839d7d1627d6c33edd416c137fc
|
[
"Apache-2.0"
] | null | null | null | 29.810734 | 131 | 0.469061 |
[
[
[
"# Thực hiện học trên model\n",
"_____no_output_____"
]
],
[
[
"# import\nimport random\nimport math\nimport time\nimport pandas as pd\nimport numpy as np\nimport torch\nimport torch.utils.data as data\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\n",
"_____no_output_____"
],
[
"# Thiết định các giá trị ban đầu\ntorch.manual_seed(1234)\nnp.random.seed(1234)\nrandom.seed(1234)\n",
"_____no_output_____"
]
],
[
[
"# Tạo DataLoader",
"_____no_output_____"
]
],
[
[
"from utils.dataloader import make_datapath_list, DataTransform, COCOkeypointsDataset\n\n# Tạo list từ MS COCO\ntrain_img_list, train_mask_list, val_img_list, val_mask_list, train_meta_list, val_meta_list = make_datapath_list(\n rootpath=\"./data/\")\nprint(len(train_img_list))\nprint(len(train_meta_list))\n\n# ★Lấy 1000 data để train\ndata_num = 1024 # bội số của batch size\ntrain_img_list = train_img_list[:data_num]\ntrain_mask_list = train_mask_list[:data_num]\nval_img_list = val_img_list[:data_num]\nval_mask_list = val_mask_list[:data_num]\ntrain_meta_list = train_meta_list[:data_num]\nval_meta_list = val_meta_list[:data_num]\n\n\n# Tạo dataset\ntrain_dataset = COCOkeypointsDataset(\n val_img_list, val_mask_list, val_meta_list, phase=\"train\", transform=DataTransform())\n\n# Để đơn giản hóa trong bài này không tạo dữ liệu đánh giá\n# val_dataset = CocokeypointsDataset(val_img_list, val_mask_list, val_meta_list, phase=\"val\", transform=DataTransform())\n\n# Tạo DataLoader\nbatch_size = 32\n\ntrain_dataloader = data.DataLoader(\n train_dataset, batch_size=batch_size, shuffle=True)\ndataloaders_dict = {\"train\": train_dataloader, \"val\": None}\n",
"121522\n121522\n"
]
],
[
[
"# Tạo Model ",
"_____no_output_____"
]
],
[
[
"from utils.openpose_net import OpenPoseNet\nnet = OpenPoseNet()\n",
"_____no_output_____"
]
],
[
[
"# Định nghĩa hàm mất mát",
"_____no_output_____"
]
],
[
[
"class OpenPoseLoss(nn.Module):\n def __init__(self):\n super(OpenPoseLoss, self).__init__()\n\n def forward(self, saved_for_loss, heatmap_target, heat_mask, paf_target, paf_mask):\n \"\"\"\n tính loss\n Parameters\n ----------\n saved_for_loss : Output ofOpenPoseNet (list)\n\n heatmap_target : [num_batch, 19, 46, 46]\n Anotation information\n\n heatmap_mask : [num_batch, 19, 46, 46]\n \n\n paf_target : [num_batch, 38, 46, 46]\n PAF Anotation\n\n paf_mask : [num_batch, 38, 46, 46]\n PAF mask\n\n Returns\n -------\n loss : \n \"\"\"\n\n total_loss = 0\n \n for j in range(6):\n\n # Không tính những vị trí của mask\n pred1 = saved_for_loss[2 * j] * paf_mask\n gt1 = paf_target.float() * paf_mask\n\n # heatmaps\n pred2 = saved_for_loss[2 * j + 1] * heat_mask\n gt2 = heatmap_target.float()*heat_mask\n\n total_loss += F.mse_loss(pred1, gt1, reduction='mean') + \\\n F.mse_loss(pred2, gt2, reduction='mean')\n\n return total_loss\n\n\ncriterion = OpenPoseLoss()\n",
"_____no_output_____"
]
],
[
[
"# Thiết định optimizer",
"_____no_output_____"
]
],
[
[
"optimizer = optim.SGD(net.parameters(), lr=1e-2,\n momentum=0.9,\n weight_decay=0.0001)\n",
"_____no_output_____"
]
],
[
[
"# Thực hiện việc học",
"_____no_output_____"
]
],
[
[
"def train_model(net, dataloaders_dict, criterion, optimizer, num_epochs):\n # Xem máy train của bạn có dùng gpu hay không\n device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n print(\"Use:\", device)\n\n # chuyển thông tin model vào ram\n net.to(device)\n\n torch.backends.cudnn.benchmark = True\n\n num_train_imgs = len(dataloaders_dict[\"train\"].dataset)\n batch_size = dataloaders_dict[\"train\"].batch_size\n\n iteration = 1\n\n # vòng học\n for epoch in range(num_epochs):\n\n # lưu thời gian bắt đầu học\n t_epoch_start = time.time()\n t_iter_start = time.time()\n epoch_train_loss = 0.0 \n epoch_val_loss = 0.0 \n\n print('-------------')\n print('Epoch {}/{}'.format(epoch+1, num_epochs))\n print('-------------')\n\n # phân loại data học và kiểm chứng\n for phase in ['train', 'val']:\n if phase == 'train':\n net.train() \n optimizer.zero_grad()\n print('(train)')\n\n # lần này bỏ qua thông tin kiểm chứng\n else:\n continue\n # net.eval() \n # print('-------------')\n # print('(val)')\n\n # Lấy từng minibatch files từ data loader\n for imges, heatmap_target, heat_mask, paf_target, paf_mask in dataloaders_dict[phase]:\n if imges.size()[0] == 1:\n continue\n\n # Gửi data đến GPU nếu máy cài GPU\n imges = imges.to(device)\n heatmap_target = heatmap_target.to(device)\n heat_mask = heat_mask.to(device)\n paf_target = paf_target.to(device)\n paf_mask = paf_mask.to(device)\n\n # thiết lập giá trị khởi tạo cho optimizer\n optimizer.zero_grad()\n\n # tính forward\n with torch.set_grad_enabled(phase == 'train'):\n _, saved_for_loss = net(imges)\n\n loss = criterion(saved_for_loss, heatmap_target,\n heat_mask, paf_target, paf_mask)\n del saved_for_loss\n # gửi thông tin loss theo back propagation khi học\n if phase == 'train':\n loss.backward()\n optimizer.step()\n\n if (iteration % 10 == 0):\n t_iter_finish = time.time()\n duration = t_iter_finish - t_iter_start\n print('イテレーション {} || Loss: {:.4f} || 10iter: {:.4f} sec.'.format(\n iteration, loss.item()/batch_size, duration))\n t_iter_start = time.time()\n\n epoch_train_loss += loss.item()\n iteration += 1\n\n # Validation (skip)\n # else:\n #epoch_val_loss += loss.item()\n\n t_epoch_finish = time.time()\n print('-------------')\n print('epoch {} || Epoch_TRAIN_Loss:{:.4f} ||Epoch_VAL_Loss:{:.4f}'.format(\n epoch+1, epoch_train_loss/num_train_imgs, 0))\n print('timer: {:.4f} sec.'.format(t_epoch_finish - t_epoch_start))\n t_epoch_start = time.time()\n\n # Lưu thông tin sau khi học\n torch.save(net.state_dict(), 'weights/openpose_net_' +\n str(epoch+1) + '.pth')\n",
"_____no_output_____"
],
[
"# HỌc (chạy 1 lần)\nnum_epochs = 1\ntrain_model(net, dataloaders_dict, criterion, optimizer, num_epochs=num_epochs)\n",
"Use: cpu\n-------------\nEpoch 1/1\n-------------\n(train)\n"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
]
] |
4a656c85c92783b1c46f6da9a0e64b58bc82a006
| 17,401 |
ipynb
|
Jupyter Notebook
|
site/ko/tutorials/distribute/save_and_load.ipynb
|
wakamezake/docs-l10n
|
5d282ddaf4444058ea12852a06d5ccf19967436e
|
[
"Apache-2.0"
] | null | null | null |
site/ko/tutorials/distribute/save_and_load.ipynb
|
wakamezake/docs-l10n
|
5d282ddaf4444058ea12852a06d5ccf19967436e
|
[
"Apache-2.0"
] | null | null | null |
site/ko/tutorials/distribute/save_and_load.ipynb
|
wakamezake/docs-l10n
|
5d282ddaf4444058ea12852a06d5ccf19967436e
|
[
"Apache-2.0"
] | null | null | null | 30.421329 | 393 | 0.508706 |
[
[
[
"##### Copyright 2019 The TensorFlow Authors.\n\n",
"_____no_output_____"
]
],
[
[
"#@title Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.",
"_____no_output_____"
]
],
[
[
"# 분석전략을 사용한 모델 저장 및 불러오기",
"_____no_output_____"
],
[
"<table class=\"tfo-notebook-buttons\" align=\"left\">\n <td>\n <a target=\"_blank\" href=\"https://www.tensorflow.org/tutorials/distribute/save_and_load\"><img src=\"https://www.tensorflow.org/images/tf_logo_32px.png\" />TensorFlow.org에서 보기</a>\n </td>\n <td>\n <a target=\"_blank\" href=\"https://colab.research.google.com/github/tensorflow/docs-l10n/blob/master/site/ko/tutorials/distribute/save_and_load.ipynb\"><img src=\"https://www.tensorflow.org/images/colab_logo_32px.png\" />구글 코랩(Colab)에서 실행하기</a>\n </td>\n <td>\n <a target=\"_blank\" href=\"https://github.com/tensorflow/docs-l10n/blob/master/site/ko/tutorials/distribute/save_and_load.ipynb\"><img src=\"https://www.tensorflow.org/images/GitHub-Mark-32px.png\" />깃허브(GitHub)소스 보기</a>\n </td>\n <td>\n <a href=\"https://storage.googleapis.com/tensorflow_docs/docs-l10n/site/ko/tutorials/distribute/save_and_load.ipynb\"><img src=\"https://www.tensorflow.org/images/download_logo_32px.png\" />노트북 다운로드 하기</a>\n </td>\n\n</table>",
"_____no_output_____"
],
[
"Note: 이 문서는 텐서플로 커뮤니티에서 번역했습니다. 커뮤니티 번역 활동의 특성상 정확한 번역과 최신 내용을 반영하기 위해 노력함에도 불구하고 [공식 영문 문서](https://www.tensorflow.org/?hl=en)의 내용과 일치하지 않을 수 있습니다. 이 번역에 개선할 부분이 있다면 [tensorflow/docs](https://github.com/tensorflow/docs-l10n) 깃허브 저장소로 풀 리퀘스트를 보내주시기 바랍니다. 문서 번역이나 리뷰에 참여하려면 [[email protected]](https://groups.google.com/a/tensorflow.org/forum/#!forum/docs-ko)로 메일을 보내주시기 바랍니다.",
"_____no_output_____"
],
[
"## 개요\n\n훈련 도중 모델을 저장하고 불러오는 것은 흔히 일어나는 일입니다. 케라스 모델을 저장하고 불러오기 위한 API에는 high-level API와 low-level API, 두 가지가 있습니다. 이 튜토리얼은 `tf.distribute.Strategy`를 사용할 때 어떻게 SavedModel APIs를 사용할 수 있는지 보여줍니다. SavedModel와 직렬화에 관한 일반적인 내용을 학습하려면, [saved model guide](../../guide/saved_model.ipynb)와 [Keras model serialization guide](../../guide/keras/save_and_serialize.ipynb)를 읽어보는 것을 권장합니다. 간단한 예로 시작해보겠습니다:",
"_____no_output_____"
],
[
"필요한 패키지 가져오기:",
"_____no_output_____"
]
],
[
[
"from __future__ import absolute_import, division, print_function, unicode_literals\n\ntry:\n # %텐서플로 버전(tensorflow_version) 코랩(Colab)에만 존재합니다. \n %tensorflow_version 2.x\nexcept Exception:\n pass\nimport tensorflow_datasets as tfds\n\nimport tensorflow as tf\ntfds.disable_progress_bar()",
"_____no_output_____"
]
],
[
[
"`tf.distribute.Strategy`를 사용하는 모델과 데이터 준비하기:",
"_____no_output_____"
],
[
"모델 훈련시키기: ",
"_____no_output_____"
]
],
[
[
"model = get_model()\ntrain_dataset, eval_dataset = get_data()\nmodel.fit(train_dataset, epochs=2)",
"_____no_output_____"
]
],
[
[
"## 모델 저장하고 불러오기\n이제 사용할 모델을 가지고 있으므로 API를 이용해 모델을 저장하고 불러오는 방법에 대해 살펴봅시다. \n두 가지 API를 사용 할 수 있습니다:\n\n* 고수준 케라스 `model.save`와 `tf.keras.models.load_model`\n* 저수준 케라스 `tf.saved_model.save`와 `tf.saved_model.load`\n\n",
"_____no_output_____"
],
[
"### 케라스 API",
"_____no_output_____"
],
[
"케라스 API들을 이용해 모델을 저장하고 불러오는 예를 소개합니다.",
"_____no_output_____"
]
],
[
[
"keras_model_path = \"/tmp/keras_save\"\nmodel.save(keras_model_path) # save()는 전략 범위를 벗어나 호출되어야 합니다.",
"_____no_output_____"
]
],
[
[
"`tf.distribute.Strategy`없이 모델 복원시키기:",
"_____no_output_____"
]
],
[
[
"restored_keras_model = tf.keras.models.load_model(keras_model_path)\nrestored_keras_model.fit(train_dataset, epochs=2)",
"_____no_output_____"
]
],
[
[
"모델을 복원시킨 후에는 `compile()`이 이미 저장되기 전에 컴파일 되기 때문에, `compile()`을 다시 호출하지 않고도 모델 훈련을 계속 할 수 있습니다. 그 모델은 텐서플로 표준 `SavedModel`의 프로토 타입에 저장됩니다. 더 많은 정보를 원한다면, [the guide to `saved_model` format](../../guide/saved_model.ipynb)를 참고하세요.\n\n`tf.distribute.strategy`의 범위를 벗어나서 `model.save()` 방법을 호출하는 것은 중요합니다. 범위 안에서 호출하는 것은 지원하지 않습니다. \n\n이제 모델을 불러와서 `tf.distribute.Strategy`를 사용해 훈련시킵니다:",
"_____no_output_____"
]
],
[
[
"another_strategy = tf.distribute.OneDeviceStrategy(\"/cpu:0\")\nwith another_strategy.scope():\n restored_keras_model_ds = tf.keras.models.load_model(keras_model_path)\n restored_keras_model_ds.fit(train_dataset, epochs=2)",
"_____no_output_____"
]
],
[
[
"위에서 볼 수 있듯이, 불러오기는 `tf.distribute.Strategy`에서 예상한대로 작동합니다. 여기서 사용된 전략은 이전에 사용된 전략과 같지 않아도 됩니다.",
"_____no_output_____"
],
[
"### `tf.saved_model` 형 API",
"_____no_output_____"
],
[
"이제 저수준 API에 대해서 살펴봅시다. 모델을 저장하는 것은 케라스 API와 비슷합니다:",
"_____no_output_____"
]
],
[
[
"model = get_model() # 새 모델 얻기\nsaved_model_path = \"/tmp/tf_save\"\ntf.saved_model.save(model, saved_model_path)",
"_____no_output_____"
]
],
[
[
"`tf.saved_model.load()`로 불러올 수 있습니다. 그러나 저수준 단계의 API이기 때문에 (따라서 더 넓은 사용범위를 갖습니다), 케라스 모델을 반환하지 않습니다. 대신, 추론하기 위해 사용될 수 있는 기능들을 포함한 객체를 반환합니다. 예를 들어:",
"_____no_output_____"
]
],
[
[
"DEFAULT_FUNCTION_KEY = \"serving_default\"\nloaded = tf.saved_model.load(saved_model_path)\ninference_func = loaded.signatures[DEFAULT_FUNCTION_KEY]",
"_____no_output_____"
]
],
[
[
"불러와진 객체는 각각 키와 관련된 채, 여러 기능을 포함할 수 있습니다. `\"serving_default\"`는 저장된 케라스 모델이 있는 추론 기능을 위한 기본 키입니다. 이 기능을 이용하여 추론합니다: ",
"_____no_output_____"
]
],
[
[
"predict_dataset = eval_dataset.map(lambda image, label: image)\nfor batch in predict_dataset.take(1):\n print(inference_func(batch))",
"_____no_output_____"
]
],
[
[
"또한 분산방식으로 불러오고 추론할 수 있습니다:",
"_____no_output_____"
]
],
[
[
"another_strategy = tf.distribute.MirroredStrategy()\nwith another_strategy.scope():\n loaded = tf.saved_model.load(saved_model_path)\n inference_func = loaded.signatures[DEFAULT_FUNCTION_KEY]\n\n dist_predict_dataset = another_strategy.experimental_distribute_dataset(\n predict_dataset)\n\n # 분산방식으로 기능 호출하기\n for batch in dist_predict_dataset:\n another_strategy.experimental_run_v2(inference_func, \n args=(batch,))",
"_____no_output_____"
]
],
[
[
"복원된 기능을 호출하는 것은 단지 저장된 모델로의 정방향 패쓰입니다(예상하기에). 만약 계속해서 불러온 기능을 훈련시키고 싶다면 어떻게 하실건가요? 불러온 기능을 더 큰 모델에 내장시킬 건가요? 일반적인 방법은 이 불러온 객체를 케라스 층에 싸서(wrap) 달성하는 것입니다. 다행히도, [TF Hub](https://www.tensorflow.org/hub)는 이 목적을 위해 [hub.KerasLayer](https://github.com/tensorflow/hub/blob/master/tensorflow_hub/keras_layer.py)을 갖고 있으며, 다음과 같습니다. ",
"_____no_output_____"
]
],
[
[
"import tensorflow_hub as hub\n\ndef build_model(loaded):\n x = tf.keras.layers.Input(shape=(28, 28, 1), name='input_x')\n # 케라스 층에 불러온 것 포장(wrap)하기\n keras_layer = hub.KerasLayer(loaded, trainable=True)(x)\n model = tf.keras.Model(x, keras_layer)\n return model\n\nanother_strategy = tf.distribute.MirroredStrategy()\nwith another_strategy.scope():\n loaded = tf.saved_model.load(saved_model_path)\n model = build_model(loaded)\n\n model.compile(loss='sparse_categorical_crossentropy',\n optimizer=tf.keras.optimizers.Adam(),\n metrics=['accuracy'])\n model.fit(train_dataset, epochs=2)",
"_____no_output_____"
]
],
[
[
"볼 수 있듯이, `hub.KerasLayer`은 `tf.saved_model.load()`로부터 불려온 결과를 또 다른 모델을 만드는데 사용될 수 있는 케라스 층으로 포장(wrap)합니다. 이것은 학습에 매우 유용합니다.",
"_____no_output_____"
],
[
"### 어떤 API를 사용해야 할까요?",
"_____no_output_____"
],
[
"저장에 관해서, 케라스 모델을 사용하는 경우, 케라스의 `model.save()` API를 사용하는 것을 권장합니다. 저장하려는 모델이 케라스 모델이 아닌 경우, 더 낮은 단계의 API를 선택해야 합니다.\n\n모델을 불러옴에 있어서, 어떤 API를 사용하느냐는 로딩 API에서 얻고자 하는 내용에 따라 결정됩니다. 케라스 모델을 가져올 수 없으면(또는 가져오고 싶지 않다면), `tf.saved_model.load()`를 사용합니다. 그 외의 경우에는, `tf.keras.models.load_model()`을 사용합니다. 케라스 모델을 저장한 경우에만 케라스 모델을 반환 받을 수 있다는 점을 유의하세요. \n\nAPI들을 목적에 따라 혼합하고 짜 맞추는 것이 가능합니다. 케라스 모델을 `model.save`와 함께 저장할 수 있고, 저수준 API인, `tf.saved_model.load`로 케라스가 아닌 모델을 불러올 수 있습니다.",
"_____no_output_____"
]
],
[
[
"model = get_model()\n\n# 케라스의 save() API를 사용하여 모델 저장하기\nmodel.save(keras_model_path) \n\nanother_strategy = tf.distribute.MirroredStrategy()\n# 저수준 API를 사용하여 모델 불러오기\nwith another_strategy.scope():\n loaded = tf.saved_model.load(keras_model_path)",
"_____no_output_____"
]
],
[
[
"### 주의사항",
"_____no_output_____"
],
[
"특별한 경우는 잘 정의되지 않은 입력을 갖는 케라스 모델을 갖고 있는 경우입니다. 예를 들어, 순차 모델은 입력 형태(`Sequential([Dense(3), ...]`) 없이 만들 수 있습니다. 하위 분류된 모델들 또한 초기화 후에 잘 정의된 입력을 갖고 있지 않습니다. 이 경우 모델을 저장하고 불러올 시 저수준 API를 사용해야 하며, 그렇지 않으면 오류가 발생할 수 있습니다.\n\n모델이 잘 정의된 입력을 갖는지 확인하려면, `model.inputs` 이 `None`인지 확인합니다. `None`이 아니라면 잘 정의된 입력입니다. 입력 형태들은 모델이 `.fit`, `.evaluate`, `.predict`에서 쓰이거나 모델을 호출 (`model(inputs)`) 할 때 자동으로 정의됩니다. \n\n예시를 살펴봅시다:",
"_____no_output_____"
]
],
[
[
"class SubclassedModel(tf.keras.Model):\n\n output_name = 'output_layer'\n\n def __init__(self):\n super(SubclassedModel, self).__init__()\n self._dense_layer = tf.keras.layers.Dense(\n 5, dtype=tf.dtypes.float32, name=self.output_name)\n\n def call(self, inputs):\n return self._dense_layer(inputs)\n\nmy_model = SubclassedModel()\n# my_model.save(keras_model_path) # 오류! \ntf.saved_model.save(my_model, saved_model_path)",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
]
] |
4a656d2fc188f6cd7b4618f81180bf37a5db42e7
| 28,501 |
ipynb
|
Jupyter Notebook
|
freecodecamp/courses/data_analysis/Exercises_1.ipynb
|
pbittencourt/python4DS
|
85f0b2a4366fe7c6daa5628ed4bd2994355963c0
|
[
"MIT"
] | null | null | null |
freecodecamp/courses/data_analysis/Exercises_1.ipynb
|
pbittencourt/python4DS
|
85f0b2a4366fe7c6daa5628ed4bd2994355963c0
|
[
"MIT"
] | null | null | null |
freecodecamp/courses/data_analysis/Exercises_1.ipynb
|
pbittencourt/python4DS
|
85f0b2a4366fe7c6daa5628ed4bd2994355963c0
|
[
"MIT"
] | null | null | null | 21.559002 | 141 | 0.539385 |
[
[
[
"\n<hr style=\"margin-bottom: 40px;\">\n\n<img src=\"https://user-images.githubusercontent.com/7065401/58563302-42466a80-8201-11e9-9948-b3e9f88a5662.jpg\"\n style=\"width:400px; float: right; margin: 0 40px 40px 40px;\"></img>\n\n# Exercises\n## Bike store sales",
"_____no_output_____"
],
[
"\n\n## Hands on! ",
"_____no_output_____"
]
],
[
[
"import numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\n\n%matplotlib inline",
"_____no_output_____"
],
[
"sales = pd.read_csv(\n 'data/sales_data.csv',\n parse_dates=['Date'])",
"_____no_output_____"
],
[
"sales.head()",
"_____no_output_____"
]
],
[
[
"\n\n### What's the mean of `Customers_Age`?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
]
],
[
[
"Why don't you try with `.mean()`",
"_____no_output_____"
]
],
[
[
"sales['Customer_Age'].mean()",
"_____no_output_____"
]
],
[
[
"Go ahead and show a <b>density (KDE)</b> and a <b>box plot</b> with the `Customer_Age` data:",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Customer_Age'].plot(kind='kde', figsize=(14,6))",
"_____no_output_____"
],
[
"sales['Customer_Age'].plot(kind='box', vert=False, figsize=(14,6))",
"_____no_output_____"
]
],
[
[
"\n\n### What's the mean of `Order_Quantity`?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Order_Quantity'].mean()",
"_____no_output_____"
]
],
[
[
"Go ahead and show a <b>histogram</b> and a <b>box plot</b> with the `Order_Quantity` data:",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Order_Quantity'].plot(kind='hist', bins=30, figsize=(14,6))",
"_____no_output_____"
],
[
"sales['Order_Quantity'].plot(kind='box', vert=False, figsize=(14,6))",
"_____no_output_____"
]
],
[
[
"\n\n### How many sales per year do we have?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Year'].value_counts()",
"_____no_output_____"
]
],
[
[
"Go ahead and show a <b>pie plot</b> with the previous data:",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Year'].value_counts().plot(kind='pie', figsize=(6,6))",
"_____no_output_____"
]
],
[
[
"\n\n### How many sales per month do we have?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Month'].value_counts()",
"_____no_output_____"
]
],
[
[
"Go ahead and show a <b>bar plot</b> with the previous data:",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Month'].value_counts().plot(kind='bar', figsize=(14,6))",
"_____no_output_____"
]
],
[
[
"\n\n### Which country has the most sales `quantity of sales`?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Country'].value_counts().head(1)",
"_____no_output_____"
],
[
"sales['Country'].value_counts()",
"_____no_output_____"
]
],
[
[
"Go ahead and show a <b>bar plot</b> of the sales per country:",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Country'].value_counts().plot(kind='bar', figsize=(14,6))",
"_____no_output_____"
]
],
[
[
"\n\n### Create a list of every product sold",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"#sales.loc[:, 'Product'].unique()\n\nsales['Product'].unique()",
"_____no_output_____"
]
],
[
[
"Create a **bar plot** showing the 10 most sold products (best sellers):",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Product'].value_counts().head(10).plot(kind='bar', figsize=(14,6))",
"_____no_output_____"
]
],
[
[
"\n\n### Can you see any relationship between `Unit_Cost` and `Unit_Price`?\n\nShow a <b>scatter plot</b> between both columns.",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales.plot(kind='scatter', x='Unit_Cost', y='Unit_Price', figsize=(6,6))",
"_____no_output_____"
]
],
[
[
"\n\n### Can you see any relationship between `Order_Quantity` and `Profit`?\n\nShow a <b>scatter plot</b> between both columns.",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales.plot(kind='scatter', x='Order_Quantity', y='Profit', figsize=(6,6))",
"_____no_output_____"
]
],
[
[
"\n\n### Can you see any relationship between `Profit` per `Country`?\n\nShow a grouped <b>box plot</b> per country with the profit values.",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales[['Profit', 'Country']].boxplot(by='Country', figsize=(10,6))",
"_____no_output_____"
]
],
[
[
"\n\n### Can you see any relationship between the `Customer_Age` per `Country`?\n\nShow a grouped <b>box plot</b> per country with the customer age values.",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales[['Customer_Age', 'Country']].boxplot(by='Country', figsize=(10,6))",
"_____no_output_____"
]
],
[
[
"\n\n### Add and calculate a new `Calculated_Date` column\n\nUse `Day`, `Month`, `Year` to create a `Date` column (`YYYY-MM-DD`).",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Calculated_Date'] = sales[['Year', 'Month', 'Day']].apply(lambda x: '{}-{}-{}'.format(x[0], x[1], x[2]), axis=1)\n\nsales['Calculated_Date'].head()",
"_____no_output_____"
]
],
[
[
"\n\n### Parse your `Calculated_Date` column into a datetime object",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Calculated_Date'] = pd.to_datetime(sales['Calculated_Date'])\n\nsales['Calculated_Date'].head()",
"_____no_output_____"
]
],
[
[
"\n\n### How did sales evolve through the years?\n\nShow a <b>line plot</b> using `Calculated_Date` column as the x-axis and the count of sales as the y-axis.",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Calculated_Date'].value_counts().plot(kind='line', figsize=(14,6))",
"_____no_output_____"
]
],
[
[
"\n\n### Increase 50 U$S revenue to every sale",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"#sales['Revenue'] = sales['Revenue'] + 50\n\nsales['Revenue'] += 50",
"_____no_output_____"
]
],
[
[
"\n\n### How many orders were made in `Canada` or `France`?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales.loc[(sales['Country'] == 'Canada') | (sales['Country'] == 'France')].shape[0]",
"_____no_output_____"
]
],
[
[
"\n\n### How many `Bike Racks` orders were made from Canada?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales.loc[(sales['Country'] == 'Canada') & (sales['Sub_Category'] == 'Bike Racks')].shape[0]",
"_____no_output_____"
]
],
[
[
"\n\n### How many orders were made in each region (state) of France?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"france_states = sales.loc[sales['Country'] == 'France', 'State'].value_counts()\n\nfrance_states",
"_____no_output_____"
]
],
[
[
"Go ahead and show a <b>bar plot</b> with the results:",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"france_states.plot(kind='bar', figsize=(14,6))",
"_____no_output_____"
]
],
[
[
"\n\n### How many sales were made per category?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Product_Category'].value_counts()",
"_____no_output_____"
]
],
[
[
"Go ahead and show a <b>pie plot</b> with the results:",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Product_Category'].value_counts().plot(kind='pie', figsize=(6,6))",
"_____no_output_____"
]
],
[
[
"\n\n### How many orders were made per accessory sub-categories?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"accessories = sales.loc[sales['Product_Category'] == 'Accessories', 'Sub_Category'].value_counts()\n\naccessories",
"_____no_output_____"
]
],
[
[
"Go ahead and show a <b>bar plot</b> with the results:",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"accessories.plot(kind='bar', figsize=(14,6))",
"_____no_output_____"
]
],
[
[
"\n\n### How many orders were made per bike sub-categories?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"bikes = sales.loc[sales['Product_Category'] == 'Bikes', 'Sub_Category'].value_counts()\n\nbikes",
"_____no_output_____"
]
],
[
[
"Go ahead and show a <b>pie plot</b> with the results:",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"bikes.plot(kind='pie', figsize=(6,6))",
"_____no_output_____"
]
],
[
[
"\n\n### Which gender has the most amount of sales?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales['Customer_Gender'].value_counts()",
"_____no_output_____"
],
[
"sales['Customer_Gender'].value_counts().plot(kind='bar')",
"_____no_output_____"
]
],
[
[
"\n\n### How many sales with more than 500 in `Revenue` were made by men?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales.loc[(sales['Customer_Gender'] == 'M') & (sales['Revenue'] == 500)].shape[0]",
"_____no_output_____"
]
],
[
[
"\n\n### Get the top-5 sales with the highest revenue",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"sales.sort_values(['Revenue'], ascending=False).head(5)",
"_____no_output_____"
]
],
[
[
"\n\n### Get the sale with the highest revenue",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"#sales.sort_values(['Revenue'], ascending=False).head(1)\n\ncond = sales['Revenue'] == sales['Revenue'].max()\n\nsales.loc[cond]",
"_____no_output_____"
]
],
[
[
"\n\n### What is the mean `Order_Quantity` of orders with more than 10K in revenue?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"cond = sales['Revenue'] > 10_000\n\nsales.loc[cond, 'Order_Quantity'].mean()",
"_____no_output_____"
]
],
[
[
"\n\n### What is the mean `Order_Quantity` of orders with less than 10K in revenue?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"cond = sales['Revenue'] < 10_000\n\nsales.loc[cond, 'Order_Quantity'].mean()",
"_____no_output_____"
]
],
[
[
"\n\n### How many orders were made in May of 2016?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"cond = (sales['Year'] == 2016) & (sales['Month'] == 'May')\n\nsales.loc[cond].shape[0]",
"_____no_output_____"
]
],
[
[
"\n\n### How many orders were made between May and July of 2016?",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"cond = (sales['Year'] == 2016) & (sales['Month'].isin(['May', 'June', 'July']))\n\nsales.loc[cond].shape[0]",
"_____no_output_____"
]
],
[
[
"Show a grouped <b>box plot</b> per month with the profit values.",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"profit_2016 = sales.loc[sales['Year'] == 2016, ['Profit', 'Month']]\n\nprofit_2016.boxplot(by='Month', figsize=(14,6))",
"_____no_output_____"
]
],
[
[
"\n\n### Add 7.2% TAX on every sale `Unit_Price` within United States",
"_____no_output_____"
]
],
[
[
"# your code goes here\n",
"_____no_output_____"
],
[
"#sales.loc[sales['Country'] == 'United States', 'Unit_Price'] = sales.loc[sales['Country'] == 'United States', 'Unit_Price'] * 1.072\n\nsales.loc[sales['Country'] == 'United States', 'Unit_Price'] *= 1.072",
"_____no_output_____"
]
],
[
[
"",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
]
] |
4a658d9417ddc9171cf2c256755c2ff905e5e60b
| 16,293 |
ipynb
|
Jupyter Notebook
|
examples/notebooks/Distributional_Expectations_Demo.ipynb
|
ag1011/great_expectations
|
9e57a55d49a98442e59e5251b2ef87c5e3c90838
|
[
"Apache-2.0"
] | 2 |
2020-05-07T18:16:17.000Z
|
2020-05-07T18:16:21.000Z
|
examples/notebooks/Distributional_Expectations_Demo.ipynb
|
ag1011/great_expectations
|
9e57a55d49a98442e59e5251b2ef87c5e3c90838
|
[
"Apache-2.0"
] | 1 |
2020-03-26T12:34:24.000Z
|
2020-03-26T12:34:24.000Z
|
examples/notebooks/Distributional_Expectations_Demo.ipynb
|
ag1011/great_expectations
|
9e57a55d49a98442e59e5251b2ef87c5e3c90838
|
[
"Apache-2.0"
] | 1 |
2022-02-10T04:20:37.000Z
|
2022-02-10T04:20:37.000Z
| 24.390719 | 440 | 0.573498 |
[
[
[
"# Dataset from here\n# https://archive.ics.uci.edu/ml/datasets/Adult",
"_____no_output_____"
],
[
"import great_expectations as ge\nimport matplotlib.pyplot as plt\nimport pandas as pd\nimport numpy as np\n%matplotlib inline",
"_____no_output_____"
],
[
"\"\"\"\nage: continuous.\nworkclass: Private, Self-emp-not-inc, Self-emp-inc, Federal-gov, Local-gov, State-gov, Without-pay, Never-worked.\nfnlwgt: continuous.\neducation: Bachelors, Some-college, 11th, HS-grad, Prof-school, Assoc-acdm, Assoc-voc, 9th, 7th-8th, 12th, Masters, 1st-4th, 10th, Doctorate, 5th-6th, Preschool.\neducation-num: continuous.\nmarital-status: Married-civ-spouse, Divorced, Never-married, Separated, Widowed, Married-spouse-absent, Married-AF-spouse.\noccupation: Tech-support, Craft-repair, Other-service, Sales, Exec-managerial, Prof-specialty, Handlers-cleaners, Machine-op-inspct, Adm-clerical, Farming-fishing, Transport-moving, Priv-house-serv, Protective-serv, Armed-Forces.\nrelationship: Wife, Own-child, Husband, Not-in-family, Other-relative, Unmarried.\nrace: White, Asian-Pac-Islander, Amer-Indian-Eskimo, Other, Black.\nsex: Female, Male.\ncapital-gain: continuous.\ncapital-loss: continuous.\nhours-per-week: continuous.\nnative-country: United-States, Cambodia, England, Puerto-Rico, Canada, Germany, Outlying-US(Guam-USVI-etc), India, Japan, Greece, South, China, Cuba, Iran, Honduras, Philippines, Italy, Poland, Jamaica, Vietnam, Mexico, Portugal, Ireland, France, Dominican-Republic, Laos, Ecuador, Taiwan, Haiti, Columbia, Hungary, Guatemala, Nicaragua, Scotland, Thailand, Yugoslavia, El-Salvador, Trinadad&Tobago, Peru, Hong, Holand-Netherlands.\n\"\"\"\ncategorical_columns = ['workclass', 'education', 'marital-status', 'occupation', 'relationship', 'race', 'sex', 'native-country']\ncontinuous_columns = ['age', 'fnlwgt', 'education-num', 'capital-gain', 'capital-loss', 'hours-per-week']",
"_____no_output_____"
],
[
"df = ge.read_csv('../data/adult.data.b_2_train.csv')\ndf_test = ge.read_csv('../data/adult.data.b_2_test.csv')\n\ndf.head()",
"_____no_output_____"
],
[
"df.shape",
"_____no_output_____"
],
[
"df.expect_column_values_to_be_in_set('sex', ['Female', 'Male'])",
"_____no_output_____"
],
[
"def strip_spaces(df):\n for column in df.columns:\n if isinstance(df[column][0], str):\n df[column] = df[column].apply(str.strip)\n\nstrip_spaces(df)\nstrip_spaces(df_test)",
"_____no_output_____"
],
[
"df.expect_column_values_to_be_in_set('sex', ['Female', 'Male'])",
"_____no_output_____"
],
[
"df['y'] = df['<=50k'].apply(lambda x: 0 if (x == '<=50K') else 1)\ndf_test['y'] = df_test['<=50k'].apply(lambda x: 0 if (x == '<=50K') else 1)",
"_____no_output_____"
],
[
"df['sex'].value_counts().plot(kind='bar')",
"_____no_output_____"
],
[
"sex_partition = ge.dataset.util.categorical_partition_data(df['sex'])\ndf.expect_column_chisquare_test_p_value_to_be_greater_than('sex', sex_partition)",
"_____no_output_____"
],
[
"df_test.expect_column_chisquare_test_p_value_to_be_greater_than('sex', sex_partition, output_format='SUMMARY')",
"_____no_output_____"
],
[
"plt.hist(df['age'])",
"_____no_output_____"
],
[
"age_partition = ge.dataset.util.continuous_partition_data(df['age'])\ndf.expect_column_bootstrapped_ks_test_p_value_to_be_greater_than('age', age_partition)",
"_____no_output_____"
],
[
"out = df_test.expect_column_bootstrapped_ks_test_p_value_to_be_greater_than('age', age_partition, output_format='SUMMARY')\nprint(out)",
"_____no_output_____"
],
[
"plt.plot(out['summary_obj']['expected_cdf']['x'], out['summary_obj']['expected_cdf']['cdf_values'])\nplt.plot(out['summary_obj']['observed_cdf']['x'], out['summary_obj']['observed_cdf']['cdf_values'])",
"_____no_output_____"
],
[
"plt.plot(out['summary_obj']['expected_partition']['bins'][1:], out['summary_obj']['expected_partition']['weights'])\nplt.plot(out['summary_obj']['observed_partition']['bins'][1:], out['summary_obj']['observed_partition']['weights'])",
"_____no_output_____"
],
[
"df['<=50k'].value_counts().plot(kind='bar')",
"_____no_output_____"
],
[
"df['education'].value_counts().plot(kind='bar')",
"_____no_output_____"
],
[
"education_partition = ge.dataset.util.categorical_partition_data(df['education'])\ndf.expect_column_chisquare_test_p_value_to_be_greater_than('education', education_partition)",
"_____no_output_____"
],
[
"df_test['education'].value_counts().plot(kind='bar')\ndf_test.expect_column_chisquare_test_p_value_to_be_greater_than('education', education_partition)",
"_____no_output_____"
],
[
"df_test.expect_column_kl_divergence_to_be_less_than('education', education_partition, threshold=0.1)",
"_____no_output_____"
],
[
"plt.hist(df['education-num'])",
"_____no_output_____"
],
[
"education_num_partition_auto = ge.dataset.util.continuous_partition_data(df['education-num'])\ndf.expect_column_bootstrapped_ks_test_p_value_to_be_greater_than('education-num', education_num_partition_auto)",
"_____no_output_____"
],
[
"education_num_partition_auto",
"_____no_output_____"
],
[
"education_num_partition_cat = ge.dataset.util.categorical_partition_data(df['education-num'])\ndf.expect_column_chisquare_test_p_value_to_be_greater_than('education-num', education_num_partition_cat)",
"_____no_output_____"
],
[
"df_test.expect_column_chisquare_test_p_value_to_be_greater_than('education-num', education_num_partition_cat)",
"_____no_output_____"
],
[
"education_num_partition = ge.dataset.util.continuous_partition_data(df['education-num'], bins='uniform', n_bins=10)\ndf.expect_column_bootstrapped_ks_test_p_value_to_be_greater_than('education-num', education_num_partition)",
"_____no_output_____"
],
[
"s1 = df['education'][df['y'] == 1].value_counts()\ns1.name = 'education_y_1'\ns2 = df['education'][df['y'] == 0].value_counts()\ns2.name = 'education_y_0'\nplotter = pd.concat([s1, s2], axis=1)",
"_____no_output_____"
],
[
"p1 = plt.bar(range(len(plotter)), plotter['education_y_0'])\np2 = plt.bar(range(len(plotter)), plotter['education_y_1'], bottom=plotter['education_y_0'])\n\nplt.xticks(range(len(plotter)), plotter.index, rotation='vertical')\nplt.show()",
"_____no_output_____"
],
[
"df.get_expectation_suite()",
"_____no_output_____"
],
[
"from sklearn.preprocessing import OneHotEncoder, LabelEncoder\nfrom sklearn.ensemble import RandomForestClassifier",
"_____no_output_____"
],
[
"def build_transformer(df_train):\n le = {}\n ohe = OneHotEncoder()\n X_cat = pd.DataFrame()\n for cat_column in categorical_columns:\n le[cat_column] = LabelEncoder()\n X_cat[cat_column + '_le'] = le[cat_column].fit_transform(df_train[cat_column])\n X_cat = ohe.fit_transform(X_cat)\n X_train = np.append(X_cat.toarray(), df_train[continuous_columns], axis=1)\n return le, ohe, X_train\n\ndef apply_transformer(le, ohe, df_test):\n X_cat = pd.DataFrame()\n for cat_column in categorical_columns:\n X_cat[cat_column + '_le'] = le[cat_column].transform(df_test[cat_column])\n X_cat = ohe.transform(X_cat)\n X_test = np.append(X_cat.toarray(), df_test[continuous_columns], axis=1)\n return X_test",
"_____no_output_____"
],
[
"clf = RandomForestClassifier()",
"_____no_output_____"
],
[
"le, ohe, X_train = build_transformer(df)",
"_____no_output_____"
],
[
"clf.fit(X_train, df['y'])",
"_____no_output_____"
],
[
"clf.score(X_train, df['y'])",
"_____no_output_____"
],
[
"my_expectations = df.get_expectation_suite()",
"_____no_output_____"
],
[
"my_expectations",
"_____no_output_____"
],
[
"results = df_test.validate(expectation_suite=my_expectations)\nresults",
"_____no_output_____"
],
[
"failures = df_test.validate(expectation_suite=my_expectations, only_return_failures=True)\nfailures",
"_____no_output_____"
],
[
"X_test = apply_transformer(le, ohe, df_test)",
"_____no_output_____"
],
[
"clf.score(X_test, df_test['y'])",
"_____no_output_____"
],
[
"df_test_2 = ge.read_csv('../data/adult.data.b_1_train.csv')\nstrip_spaces(df_test_2)\n#df_test_2 = df_test_2[df_test_2['native-country'] != 'Holand-Netherlands']\ndf_test_2['y'] = df_test_2['<=50k'].apply(lambda x: 0 if (x == '<=50K') else 1)\nX_test_2 = apply_transformer(le, ohe, df_test_2)",
"_____no_output_____"
],
[
"clf.score(X_test_2, df_test_2['y'])",
"_____no_output_____"
],
[
"# Health Screening: Preventative Checkup!",
"_____no_output_____"
],
[
"failures = df_test_2.validate(my_expectations, only_return_failures=True, output_format='SUMMARY')\nfailures",
"_____no_output_____"
],
[
"df_test_2['sex'].value_counts().plot(kind='bar')",
"_____no_output_____"
],
[
"df_test_3 = ge.read_csv('../data/adult.data.b_1_test.csv')\nstrip_spaces(df_test_3)\n#df_test_3 = df_test_3[df_test_3['native-country'] != 'Holand-Netherlands']\ndf_test_3['y'] = df_test_3['<=50k'].apply(lambda x: 0 if (x == '<=50K') else 1)\nX_test_3 = apply_transformer(le, ohe, df_test_3)",
"_____no_output_____"
],
[
"clf.score(X_test_3, df_test_3['y'])",
"_____no_output_____"
],
[
"#What could have gone wrong?\n#\n# a. The world changed.\n# b. New sensor means different data.\n# c. Bueller? Bueller?\n# d. Biased sample of the data\n",
"_____no_output_____"
],
[
"result = df_test_2.validate(my_expectations, only_return_failures=True, output_format='SUMMARY')\nfailures",
"_____no_output_____"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a65a89503cb96611e3d3700f27875e208422e02
| 172,598 |
ipynb
|
Jupyter Notebook
|
Understanding_and_Creating_Binary_Classification_NNs/3_layer_toy_neural_network_on_iris_sepals.ipynb
|
RafayAK/NothingButNumPy
|
5cc32a517684c7e68cce44412f7d7c759ecb299e
|
[
"MIT"
] | 48 |
2019-07-18T17:59:23.000Z
|
2022-03-06T07:08:55.000Z
|
Understanding_and_Creating_Binary_Classification_NNs/3_layer_toy_neural_network_on_iris_sepals.ipynb
|
kewlcoder/NothingButNumPy
|
5cc32a517684c7e68cce44412f7d7c759ecb299e
|
[
"MIT"
] | 2 |
2019-11-15T07:26:58.000Z
|
2019-11-21T15:29:56.000Z
|
Understanding_and_Creating_Binary_Classification_NNs/3_layer_toy_neural_network_on_iris_sepals.ipynb
|
kewlcoder/NothingButNumPy
|
5cc32a517684c7e68cce44412f7d7c759ecb299e
|
[
"MIT"
] | 23 |
2019-08-25T01:54:15.000Z
|
2021-09-28T02:39:06.000Z
| 193.278835 | 49,296 | 0.905358 |
[
[
[
"# Nothing But NumPy: A 3-layer Binary Classification Neural Network on Iris Flowers\n\nPart of the blog [\"Nothing but NumPy: Understanding & Creating Binary Classification Neural Networks with Computational Graphs from Scratch\"](https://medium.com/@rafayak/nothing-but-numpy-understanding-creating-binary-classification-neural-networks-with-e746423c8d5c)- by [Rafay Khan](https://twitter.com/RafayAK)\n\nIn this notebook we'll create a 3-layer neural network (i.e. one input one, one hidden layer and one output layer) and train it on Iris dataset using _only_ **sepals** as input features to classify **Iris-virginica vs. others**\n\nFirst, let's import NumPy, our neural net layers, the Binary Cross-Entropy(bce) Cost function and helper functions.\n\n_Feel free to look into the helper functions in the utils directory._",
"_____no_output_____"
]
],
[
[
"import numpy as np\nfrom Layers.LinearLayer import LinearLayer\nfrom Layers.ActivationLayer import SigmoidLayer\nfrom util.utilities import *\nfrom util.cost_functions import compute_stable_bce_cost\nimport matplotlib.pyplot as plt\n\n# to show all the generated plots inline in the notebook\n%matplotlib inline",
"_____no_output_____"
]
],
[
[
"",
"_____no_output_____"
],
[
"For convenience we'll load the data through [scikit-learn](https://scikit-learn.org/stable/index.html#). \n\nIf you don't have it installed please refer to this [link](https://scikit-learn.org/stable/install.html)",
"_____no_output_____"
]
],
[
[
"# load data from scikit-learn's datasets module\nfrom sklearn.datasets import load_iris\n\niris = load_iris() # returns a python dictionary with the dataset",
"_____no_output_____"
]
],
[
[
"Let's see what the dataset contains:",
"_____no_output_____"
]
],
[
[
"list(iris.keys())",
"_____no_output_____"
]
],
[
[
"- **data**: contains the 4 features of each example in a row, has 150 rows\n- **target**: contains the label for each example _(0->setosa, 1->versicolor, 2->virginica)_\n- **target_names**: contains the names of each target label\n- **DESCR**: contains the desription of the dataset\n- **feature_names**: contains the names of the 4 features(sepal length, sepal width, petal length, petal width)\n- **filename** : where the file is located on the computer\n",
"_____no_output_____"
],
[
"Let's explore the data:",
"_____no_output_____"
]
],
[
[
"iris.data.shape # rows(examples), cols(features)",
"_____no_output_____"
],
[
"iris.target.shape # labels for 150 flowers ",
"_____no_output_____"
],
[
"iris.target_names # print the name of the 3 labels(species) an example could belong to",
"_____no_output_____"
],
[
"iris.feature_names # name of each feature in data's columns",
"_____no_output_____"
],
[
"iris.data[:5, :] # print first 5 examples from the Iris dataset",
"_____no_output_____"
],
[
"iris.target[:5] # print labels for the first 5 examples in the Iris dataset",
"_____no_output_____"
]
],
[
[
"So, the data of the **first** 5 examples looks as follows:\n\n| exmaple# | sepal length (cm) | sepal width (cm) | petal length (cm) | petal width (cm) | target | target name|\n| --- | --- | --- || --- | --- | --- |\n| 0 | 5.1 | 3.5 | 1.4 | 0.2| 0| setosa\n| 1 |4.9| 3. | 1.4| 0.2|0| setosa\n| 2 |4.7| 3.2| 1.3| 0.2|0| setosa\n| 3 |4.6| 3.1| 1.5| 0.2|0| setosa\n| 4 |5. | 3.6| 1.4| 0.2|0| setosa",
"_____no_output_____"
],
[
"For our model we will only use **sepal length and sepal width** to classify whether the Iris flower is _virginica_ or _other_",
"_____no_output_____"
]
],
[
[
"# take only sepal length(0th col) and sepal width(1st col)\nX = iris.data[:, :2] \n\n# fix the labes shape so that instead of (150,) its (150,1),\n# helps avoiding weird broadcasting errors\nY = (iris.target).reshape((150, 1)) ",
"_____no_output_____"
],
[
"X.shape",
"_____no_output_____"
],
[
"Y.shape",
"_____no_output_____"
]
],
[
[
"**Notice** in the table above that the first 5 examples belong to __'setosa'__ species, this pattern continues in the dataset(the pattern is all _setosa_ examples followed by _versicolor_ examples and finally _virginica_ examples). ___A good practice is to randomize the data before training a neural network, so that the neural network does not, by accident, learn a trivial ordering pattern in the data.___\n\nSo let's randomize the data",
"_____no_output_____"
]
],
[
[
"np.random.seed(48) # for reproducible randomization \nrandom_indices = np.random.permutation(len(X)) # genrate random indices\n\nX_train = X[random_indices]\nY_train = Y[random_indices]",
"_____no_output_____"
]
],
[
[
"Now let's again print the first 5 examples and see the results(note this time there are only two features - _sepal lenght_, _sepal width_ )",
"_____no_output_____"
]
],
[
[
"X_train[:5, :]",
"_____no_output_____"
],
[
"Y_train[:5]",
"_____no_output_____"
]
],
[
[
"Now, the data of the **first** 5 examples looks as follows:\n\n| exmaple# | sepal length (cm) | sepal width (cm) | target | target name|\n| --- | --- | --- || --- | \n| 0 | 5.7| 2.9| 1| versicolor \n| 1 | 6.1| 2.8| 1| versicolor\n| 2 | 6.1| 2.6| 2| virginica\n| 3 | 4.5| 2.3| 0| setosa\n| 4 | 5.9| 3.2| 1| versicolor\n\n\nFinally, let's put the training set(`X_train`) & and labels(`Y_train`) in the correct shape `(feat, examples)` and `(examples,1)`, respectively. Also we'll make the target label ___virginica=1___ and the rest ___0___. ",
"_____no_output_____"
]
],
[
[
"# Transpose the data so that it's in the correct shape \n# for passing through neural network\n# also binarize the classes viginica=1 and the rest 0\nX_train = X_train.T\nY_train = Y_train.T \nY_train = (Y_train==2).astype('int') # uses bool logic to binarize labels, wherever label=2 output True(1) rest Flase(0) ",
"_____no_output_____"
],
[
"print(\"Shape of training data, X_train: {}\".format(X_train.shape))\nprint(\"Shape of labels, Y_train: {}\".format(Y_train.shape))",
"Shape of training data, X_train: (2, 150)\nShape of labels, Y_train: (1, 150)\n"
],
[
"Y_train[:, :5] # print first five examples",
"_____no_output_____"
]
],
[
[
"Before training the neural net let's visulaize the data: ",
"_____no_output_____"
]
],
[
[
"cmap = matplotlib.colors.ListedColormap([\"red\", \"green\"], name='from_list', N=None)\n# scattter plot\nscatter = plt.scatter(X_train.T[:, 0], X_train.T[:, 1], \n s=200, c=np.squeeze(Y_train.T), \n marker='x', cmap=cmap) # s-> size of marker\n\nplt.xlabel('sepal lenght', size=20)\nplt.ylabel('sepal width', size=20)\n\nplt.legend(scatter.legend_elements()[0], ['others', 'virginica'])\n\nplt.show()",
"_____no_output_____"
]
],
[
[
"### Notice that this data is very tough to classify perfectly, as many of the data points are intertwined( i.e some green and red points are too close to each other) ",
"_____no_output_____"
],
[
"***\n***",
"_____no_output_____"
],
[
"#### Now we are ready to setup and train the Neural Network\n\nThis is the neural net architecture we'll use\n\n",
"_____no_output_____"
]
],
[
[
"# define training constants\nlearning_rate = 1\nnumber_of_epochs = 10000\n\nnp.random.seed(48) # set seed value so that the results are reproduceable\n # (weights will now be initailzaed to the same pseudo-random numbers, each time)\n\n\n# Our network architecture has the shape: \n# (input)--> [Linear->Sigmoid] -> [Linear->Sigmoid] -> [Linear->Sigmoid] -->(output) \n\n#------ LAYER-1 ----- define 1st hidden layer that takes in training data \nZ1 = LinearLayer(input_shape=X_train.shape, n_out=5, ini_type='xavier')\nA1 = SigmoidLayer(Z1.Z.shape)\n\n#------ LAYER-2 ----- define 2nd hidden layer that takes in values from 1st-hidden layer\nZ2= LinearLayer(input_shape=A1.A.shape, n_out= 3, ini_type='xavier')\nA2= SigmoidLayer(Z2.Z.shape)\n\n\n#------ LAYER-3 ----- define output layer that takes in values from 2nd-hidden layer\nZ3= LinearLayer(input_shape=A2.A.shape, n_out=1, ini_type='xavier')\nA3= SigmoidLayer(Z3.Z.shape)",
"_____no_output_____"
]
],
[
[
"Now we can start the training loop:",
"_____no_output_____"
]
],
[
[
"costs = [] # initially empty list, this will store all the costs after a certian number of epochs\n\n# Start training\nfor epoch in range(number_of_epochs):\n \n # ------------------------- forward-prop -------------------------\n Z1.forward(X_train)\n A1.forward(Z1.Z)\n \n Z2.forward(A1.A)\n A2.forward(Z2.Z)\n \n Z3.forward(A2.A)\n A3.forward(Z3.Z)\n \n # ---------------------- Compute Cost ----------------------------\n cost, dZ3 = compute_stable_bce_cost(Y=Y_train, Z=Z3.Z)\n \n # print and store Costs every 100 iterations and of the last iteration.\n if (epoch % 100) == 0 or epoch == number_of_epochs - 1:\n print(\"Cost at epoch#{}: {}\".format(epoch, cost))\n costs.append(cost)\n \n # ------------------------- back-prop ----------------------------\n \n Z3.backward(dZ3)\n \n A2.backward(Z3.dA_prev)\n Z2.backward(A2.dZ)\n \n A1.backward(Z2.dA_prev)\n Z1.backward(A1.dZ)\n \n # ----------------------- Update weights and bias ----------------\n Z3.update_params(learning_rate=learning_rate)\n Z2.update_params(learning_rate=learning_rate)\n Z1.update_params(learning_rate=learning_rate)",
"Cost at epoch#0: 0.6667400084592768\nCost at epoch#100: 0.6302928461973847\nCost at epoch#200: 0.5709289459585686\nCost at epoch#300: 0.5186742755369246\nCost at epoch#400: 0.459962391632051\nCost at epoch#500: 0.4513239451183089\nCost at epoch#600: 0.4474114673868234\nCost at epoch#700: 0.44591618452429194\nCost at epoch#800: 0.4440500019262934\nCost at epoch#900: 0.4445422747431329\nCost at epoch#1000: 0.4447222560300399\nCost at epoch#1100: 0.44468895649070467\nCost at epoch#1200: 0.444513016851467\nCost at epoch#1300: 0.44424310464735944\nCost at epoch#1400: 0.4439079365579478\nCost at epoch#1500: 0.44352277927682443\nCost at epoch#1600: 0.44309484495031315\nCost at epoch#1700: 0.4426269032540103\nCost at epoch#1800: 0.44211962268537824\nCost at epoch#1900: 0.44157314460972397\nCost at epoch#2000: 0.4409881537658576\nCost at epoch#2100: 0.44036652067718157\nCost at epoch#2200: 0.4397115203535574\nCost at epoch#2300: 0.4390276642670329\nCost at epoch#2400: 0.43832025974366723\nCost at epoch#2500: 0.43759486309421536\nCost at epoch#2600: 0.43685678275515305\nCost at epoch#2700: 0.43611072799072403\nCost at epoch#2800: 0.43536062623941796\nCost at epoch#2900: 0.4346095806889408\nCost at epoch#3000: 0.43385991921259237\nCost at epoch#3100: 0.43311328873358124\nCost at epoch#3200: 0.43237076251124074\nCost at epoch#3300: 0.43163294208185055\nCost at epoch#3400: 0.4309000461916758\nCost at epoch#3500: 0.43017198533157436\nCost at epoch#3600: 0.4294484233996418\nCost at epoch#3700: 0.42872882887118996\nCost at epoch#3800: 0.42801251766568066\nCost at epoch#3900: 0.4272986893144104\nCost at epoch#4000: 0.4265864574027432\nCost at epoch#4100: 0.4258748747549662\nCost at epoch#4200: 0.42516295351561867\nCost at epoch#4300: 0.4244496801706377\nCost at epoch#4400: 0.4237340256236626\nCost at epoch#4500: 0.42301495065322564\nCost at epoch#4600: 0.4222914073672061\nCost at epoch#4700: 0.4215623375807977\nCost at epoch#4800: 0.4208266693215226\nCost at epoch#4900: 0.42008331287705547\nCost at epoch#5000: 0.41933115794019815\nCost at epoch#5100: 0.4185690734836653\nCost at epoch#5200: 0.4177959120426697\nCost at epoch#5300: 0.41701052012519374\nCost at epoch#5400: 0.41621175652709386\nCost at epoch#5500: 0.415398520396492\nCost at epoch#5600: 0.41456979092670226\nCost at epoch#5700: 0.41372468046501054\nCost at epoch#5800: 0.41286250244686284\nCost at epoch#5900: 0.41198285466902285\nCost at epoch#6000: 0.4110857167077694\nCost at epoch#6100: 0.4101715574732299\nCost at epoch#6200: 0.4092414448093109\nCost at epoch#6300: 0.40829714392212585\nCost at epoch#6400: 0.40734118616365816\nCost at epoch#6500: 0.4063768861419422\nCost at epoch#6600: 0.4054082858426165\nCost at epoch#6700: 0.40444001184231304\nCost at epoch#6800: 0.4034770464431862\nCost at epoch#6900: 0.4025244330564821\nCost at epoch#7000: 0.4015869543958715\nCost at epoch#7100: 0.40066883177815055\nCost at epoch#7200: 0.39977349041297944\nCost at epoch#7300: 0.3989034198286793\nCost at epoch#7400: 0.39806013660990763\nCost at epoch#7500: 0.3972442364016274\nCost at epoch#7600: 0.39645550954561487\nCost at epoch#7700: 0.39569309146252823\nCost at epoch#7800: 0.394955623044847\nCost at epoch#7900: 0.39424140433338883\nCost at epoch#8000: 0.39354853332507067\nCost at epoch#8100: 0.39287502880173447\nCost at epoch#8200: 0.39221894068439594\nCost at epoch#8300: 0.39157845348748693\nCost at epoch#8400: 0.39095198818438753\nCost at epoch#8500: 0.3903383054804529\nCost at epoch#8600: 0.3897366094313756\nCost at epoch#8700: 0.3891466449076261\nCost 
at epoch#8800: 0.38856877581258586\nCost at epoch#8900: 0.38800402242888443\nCost at epoch#9000: 0.3874540220102007\nCost at epoch#9100: 0.3869208465308241\nCost at epoch#9200: 0.38640654356250503\nCost at epoch#9300: 0.3859121199202252\nCost at epoch#9400: 0.385435396529364\nCost at epoch#9500: 0.3849666690102662\nCost at epoch#9600: 0.38448076922649005\nCost at epoch#9700: 0.3839273351964421\nCost at epoch#9800: 0.38324255509741856\nCost at epoch#9900: 0.38244362698615847\nCost at epoch#9999: 0.38255461005942853\n"
]
],
[
[
"Now let's see how well the neural net peforms on the training data after the training as finished\n\n`predict` helper functionin the cell below returns three things:\n\n* `p`: predicted labels (output 1 if predictded output is greater than classification threshold `thresh`)\n* `probas`: raw probabilities (how sure the neural net thinks the output is 1, this is just `P_hat`)\n* `accuracy`: the number of correct predictions from total predictions\n\n",
"_____no_output_____"
]
],
[
[
"classifcation_thresh = 0.5\n\n\npredicted_outputs, p_hat, accuracy = predict(X=X_train, Y=Y_train, \n Zs=[Z1, Z2, Z3], As=[A1, A2, A3], thresh=classifcation_thresh)\n\nprint(\"The predicted outputs of first 5 examples: \\n{}\".format(predicted_outputs[:,:5]))\nprint(\"The predicted prbabilities of first 5 examples:\\n {}\".format(np.round(p_hat[:, :5], decimals=3)) )\nprint(\"\\nThe accuracy of the model is: {}%\".format(accuracy))",
"The predicted outputs of first 5 examples: \n[[ 0. 1. 1. 0. 0.]]\nThe predicted prbabilities of first 5 examples:\n [[ 0.355 0.579 0.549 0.067 0.366]]\n\nThe accuracy of the model is: 80.66666666666666%\n"
]
],
[
[
"#### The Learning Curve",
"_____no_output_____"
]
],
[
[
"plot_learning_curve(costs, learning_rate, total_epochs=number_of_epochs)",
"_____no_output_____"
]
],
[
[
"#### The Decision Boundary",
"_____no_output_____"
]
],
[
[
"plot_decision_boundary(lambda x: predict_dec(Zs=[Z1, Z2, Z3], As=[A1, A2, A3], X=x.T, thresh=classifcation_thresh), \n X=X_train.T, Y=Y_train)",
"_____no_output_____"
]
],
[
[
"#### The Shaded Decision Boundary",
"_____no_output_____"
]
],
[
[
"plot_decision_boundary_shaded(lambda x: predict_dec(Zs=[Z1, Z2, Z3], As=[A1, A1, A3], X=x.T, thresh=classifcation_thresh), \n X=X_train.T, Y=Y_train)",
"_____no_output_____"
]
],
[
[
"## Bounus\n\nTrain this dataset using only a 1-layer or 2-layer neural network\n\n_(Hint: works slightly better)_",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
]
] |
4a65a9b774f3f8dd209cef4b1b346537ab814a52
| 9,809 |
ipynb
|
Jupyter Notebook
|
Exporter.ipynb
|
SkBlaz/KBNR
|
4c37fe3fdfa7719572affd617e2dab43a54ba1d5
|
[
"MIT"
] | 1 |
2022-02-04T07:57:55.000Z
|
2022-02-04T07:57:55.000Z
|
Exporter.ipynb
|
SkBlaz/KBNR
|
4c37fe3fdfa7719572affd617e2dab43a54ba1d5
|
[
"MIT"
] | 1 |
2022-02-24T13:00:33.000Z
|
2022-02-24T13:00:33.000Z
|
Exporter.ipynb
|
SkBlaz/KBNR
|
4c37fe3fdfa7719572affd617e2dab43a54ba1d5
|
[
"MIT"
] | 1 |
2022-02-04T08:00:16.000Z
|
2022-02-04T08:00:16.000Z
| 32.58804 | 170 | 0.516159 |
[
[
[
"import san\nfrom src_end2end import statistical_features\nimport lsa_features\nimport pickle\nimport numpy as np\nfrom tqdm import tqdm\nimport pandas as pd\nimport os\nimport skopt\nfrom skopt import gp_minimize\nfrom sklearn import preprocessing\nfrom skopt.space import Real, Integer, Categorical\nfrom skopt.utils import use_named_args\nfrom sklearn.metrics import f1_score\nst_models = [\"roberta-large-nli-stsb-mean-tokens\", \"xlm-r-large-en-ko-nli-ststb\", \"distilbert-base-nli-mean-tokens\"]",
"_____no_output_____"
],
[
"from sentence_transformers import SentenceTransformer\nst_models = [\"roberta-large-nli-stsb-mean-tokens\", \"xlm-r-large-en-ko-nli-ststb\", \"distilbert-base-nli-mean-tokens\"]\ndef embedd_bert(text, st_model = 'paraphrase-distilroberta-base-v1', split = 'train'): \n paths = \"temp_berts/\"+st_model+\"_\"+split+'.pkl'\n if os.path.isfile(paths):\n sentence_embeddings = pickle.load(open(paths,'rb')) \n return sentence_embeddings\n model = SentenceTransformer(st_model)\n sentence_embeddings = model.encode(text)\n with open(paths, 'wb') as f:\n pickle.dump(sentence_embeddings, f)\n return sentence_embeddings",
"_____no_output_____"
],
[
"from sentence_transformers import SentenceTransformer\nst_models = [\"roberta-large-nli-stsb-mean-tokens\", \"xlm-r-large-en-ko-nli-ststb\", \"distilbert-base-nli-mean-tokens\"]\ndef embedd_bert2(text, st_model = 'paraphrase-distilroberta-base-v1'): \n text = [t[:512] for t in text]\n model = SentenceTransformer(st_model)\n sentence_embeddings = model.encode(text)\n return sentence_embeddings",
"_____no_output_____"
],
[
"def export_kgs(dataset):\n path = \"representations/\"+dataset+\"/\"\n for split in [\"train\", \"dev\", \"test\"]:\n for kg in [\"complex\", \"transe\", \"quate\", \"simple\", \"rotate\", \"distmult\"]:\n path_tmp = path + split + \"/\" + kg + \".csv\"\n tmp_kg = prep_kgs(kg, split)\n tmp_kg = np.array((tmp_kg))\n np.savetxt(path_tmp, tmp_kg, delimiter=\",\")\n ",
"_____no_output_____"
],
[
"def prep_kgs(kg_emb, split='train'):\n embs = []\n global dataset\n path_in = \"kg_emb_dump/\"+dataset+\"/\"+split+\"_\"+kg_emb+'_n.pkl'\n with open(path_in, \"rb\") as f:\n kgs_p = pickle.load(f)\n for x,y in kgs_p:\n embs.append(y)\n return embs",
"_____no_output_____"
],
[
"def export_kgs_spec(dataset):\n path = \"representations/\"+dataset+\"/\"\n for split in [\"train\", \"dev\", \"test\"]:\n for kg in [\"complex\", \"transe\", \"quate\", \"simple\", \"rotate\", \"distmult\"]:\n path_tmp = path + split + \"/\" + kg + \"_entity.csv\"\n tmp_kg = prep_kgs2(kg, split)\n tmp_kg = np.array((tmp_kg))\n np.savetxt(path_tmp, tmp_kg, delimiter=\",\")\n ",
"_____no_output_____"
],
[
"def export_LM(dataset):\n texts = {}\n ys = {}\n path = \"representations/\"+dataset+\"/\"\n for thing in [\"train\", \"dev\", \"test\"]:\n path_in = \"data/final/\"+dataset+\"/\"+thing+'.csv'\n df = pd.read_csv(path_in, encoding='utf-8')\n texts[thing] = df.text_a.to_list()\n #ys[thing] = df.label.to_list()\n staticstical = statistical_features.fit_space(texts[thing])\n kg = 'stat'\n path_tmp = path + thing + \"/\" + kg + \".csv\"\n np.savetxt(path_tmp, staticstical, delimiter=\",\")\n \n bertz = embedd_bert2(texts[thing], st_models[0])\n kg = st_models[0]\n path_tmp = path + thing + \"/\" + kg + \".csv\"\n np.savetxt(path_tmp, bertz, delimiter=\",\")\n \n bertz2 = embedd_bert2(texts[thing], st_models[1]) \n kg = st_models[1]\n path_tmp = path + thing + \"/\" + kg + \".csv\"\n np.savetxt(path_tmp, bertz2, delimiter=\",\")\n \n bertz3 = embedd_bert2(texts[thing], st_models[2]) \n kg = st_models[2]\n path_tmp = path + thing + \"/\" + kg + \".csv\"\n np.savetxt(path_tmp, bertz3, delimiter=\",\") ",
"_____no_output_____"
],
[
"for dataset in tqdm([\"pan2020\", \"AAAI2021_COVID19_fake_news\", \"LIAR_PANTS\", \"ISOT\", \"FakeNewsNet\"]):\n path = \"representations/\"+dataset+\"/\"\n for thing in [\"train\", \"dev\", \"test\"]:\n path_in = \"data/final/\"+dataset+\"/\"+thing+'.csv'\n df = pd.read_csv(path_in, encoding='utf-8')\n if dataset == \"pan2020\":\n ys = df.labels.to_list()\n else:\n ys = df.label.to_list() \n path_tmp = path + thing + \"/\" + \"_ys.csv\"\n np.savetxt(path_tmp, ys, delimiter=\",\")",
"100%|██████████| 5/5 [00:01<00:00, 3.47it/s]\n"
],
[
"def prep_kgs2(kg_emb, split='train'):\n embs = []\n global dataset\n path_in = \"kg_emb_dump/\"+dataset+\"/\"+split+\"_\"+kg_emb+'_speakers.pkl'\n with open(path_in, \"rb\") as f:\n kgs_p = pickle.load(f)\n for x,y in kgs_p:\n embs.append(y)\n return embs",
"_____no_output_____"
],
[
"from tqdm import tqdm\nfor dataset in tqdm([\"LIAR_PANTS\", \"FakeNewsNet\"]):\n export_kgs_spec(dataset)",
"100%|██████████| 2/2 [01:11<00:00, 35.73s/it]\n"
],
[
"for dataset in tqdm([\"pan2020\", \"AAAI2021_COVID19_fake_news\", \"LIAR_PANTS\", \"ISOT\", \"FakeNewsNet\"]):\n export_kgs(dataset)",
"100%|██████████| 5/5 [04:07<00:00, 49.49s/it]\n"
],
[
"from tqdm import tqdm\nfor dataset in tqdm([\"ISOT\", \"pan2020\"]):#)\"\"LIAR_PANTS\",\"pan2020\", \"ISOT\", \"AAAI2021_COVID19_fake_news\", \"FakeNewsNet\"]):\n export_LM(dataset)",
" 0%| | 0/2 [00:00<?, ?it/s]18-Mar-21 21:26:43 - Load pretrained SentenceTransformer: roberta-large-nli-stsb-mean-tokens\n18-Mar-21 21:26:43 - Did not find folder roberta-large-nli-stsb-mean-tokens\n18-Mar-21 21:26:43 - Try to download model from server: https://sbert.net/models/roberta-large-nli-stsb-mean-tokens.zip\n18-Mar-21 21:26:43 - Load SentenceTransformer from folder: /home/boshkok/.cache/torch/sentence_transformers/sbert.net_models_roberta-large-nli-stsb-mean-tokens\n18-Mar-21 21:26:48 - Use pytorch device: cuda\n"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a65abedc4f640c408de52dffa458723c19407c5
| 69,151 |
ipynb
|
Jupyter Notebook
|
nbs/20c_qlearning.dqn_target.ipynb
|
tyoc213-contrib/fast-reinforcement-learning-2
|
66b27fcd7f65122ca9cda46bcd1cb35f2749337e
|
[
"Apache-2.0"
] | null | null | null |
nbs/20c_qlearning.dqn_target.ipynb
|
tyoc213-contrib/fast-reinforcement-learning-2
|
66b27fcd7f65122ca9cda46bcd1cb35f2749337e
|
[
"Apache-2.0"
] | null | null | null |
nbs/20c_qlearning.dqn_target.ipynb
|
tyoc213-contrib/fast-reinforcement-learning-2
|
66b27fcd7f65122ca9cda46bcd1cb35f2749337e
|
[
"Apache-2.0"
] | null | null | null | 92.078562 | 3,422 | 0.618747 |
[
[
[
"# default_exp qlearning.dqn_target",
"_____no_output_____"
],
[
"#export\nimport torch.nn.utils as nn_utils\nfrom fastai.torch_basics import *\nfrom fastai.data.all import *\nfrom fastai.basics import *\nfrom dataclasses import field,asdict\nfrom typing import List,Any,Dict,Callable\nfrom collections import deque\nimport gym\nimport torch.multiprocessing as mp\nfrom torch.optim import *\n\nfrom fastrl.data import *\nfrom fastrl.async_data import *\nfrom fastrl.basic_agents import *\nfrom fastrl.learner import *\nfrom fastrl.metrics import *\nfrom fastrl.ptan_extension import *\nfrom fastrl.qlearning.dqn import *\n\nif IN_NOTEBOOK:\n from IPython import display\n import PIL.Image",
"/opt/conda/envs/fastrl/lib/python3.7/site-packages/torch/cuda/__init__.py:52: UserWarning: CUDA initialization: CUDA unknown error - this may be due to an incorrectly set up environment, e.g. changing env variable CUDA_VISIBLE_DEVICES after program start. Setting the available devices to be zero. (Triggered internally at /opt/conda/conda-bld/pytorch_1603729047590/work/c10/cuda/CUDAFunctions.cpp:100.)\n return torch._C._cuda_getDeviceCount() > 0\n"
]
],
[
[
"# Target DQN",
"_____no_output_____"
]
],
[
[
"# export\nclass TargetDQNTrainer(Callback):\n def __init__(self,n_batch=0): store_attr()\n def after_pred(self):\n exps=[ExperienceFirstLast(*o) for o in self.learn.sample_yb]\n s=torch.stack([e.state for e in exps]).float().to(default_device())\n a=torch.stack([e.action for e in exps]).to(default_device())\n sp=torch.stack([e.last_state for e in exps]).float().to(default_device())\n r=torch.stack([e.reward for e in exps]).float().to(default_device())\n d=torch.stack([e.done for e in exps]).to(default_device())\n\n state_action_values = self.learn.model(s.float()).gather(1, a.unsqueeze(-1)).squeeze(-1)\n# next_state_values = self.learn.target_model(sp.float()).max(1)[0]\n next_state_values=self.get_next_state_values(sp)\n next_state_values[d] = 0.0\n\n expected_state_action_values=next_state_values.detach()*(self.learn.discount**self.learn.n_steps)+r\n# print(*self.learn.yb,self.learn.pred)\n# print(self.learn.pred,self.learn.yb)\n# print(self.learn._yb,self.learn.yb[0])\n self.learn._yb=self.learn.yb\n self.learn.yb=(expected_state_action_values.float(),)\n# print(self.learn.yb[0].mean(),self.learn.yb[0].size())\n self.learn.pred=state_action_values\n# print(self.learn.pred.mean(),self.learn.pred.size())\n \n# print(self.learn.agent.a_selector.epsilon,self.n_batch)\n \n def get_next_state_values(self,sp):\n return self.learn.target_model(sp.float()).max(1)[0]\n \n# def after_epoch(self): print(len(self.learn.cbs[4].queue))\n \n def after_loss(self):\n self.learn.yb=self.learn._yb\n \n def after_batch(self):\n if self.n_batch%self.learn.target_sync==0:\n# print('copy over',self.n_batch)\n self.learn.target_model.load_state_dict(self.learn.model.state_dict())\n self.n_batch+=1",
"_____no_output_____"
],
[
"# export\nclass TargetDQNLearner(AgentLearner):\n def __init__(self,dls,discount=0.99,n_steps=3,target_sync=300,**kwargs):\n store_attr()\n self.target_q_v=[]\n super().__init__(dls,loss_func=nn.MSELoss(),**kwargs)\n self.target_model=deepcopy(self.model)",
"_____no_output_____"
],
[
"env='CartPole-v1'\nmodel=LinearDQN((4,),2)\nagent=DiscreteAgent(model=model.to(default_device()),device=default_device(),\n a_selector=EpsilonGreedyActionSelector())\n\nblock=FirstLastExperienceBlock(agent=agent,seed=0,n_steps=1,dls_kwargs={'bs':1,'num_workers':0,'verbose':False,'indexed':True,'shuffle_train':False})\nblk=IterableDataBlock(blocks=(block),\n splitter=FuncSplitter(lambda x:False),\n )\ndls=blk.dataloaders([env]*1,n=1*1000,device=default_device())\n\nlearner=TargetDQNLearner(dls,agent=agent,n_steps=1,cbs=[EpsilonTracker(e_steps=100),\n ExperienceReplay(sz=100000,bs=32,starting_els=32,max_steps=gym.make(env)._max_episode_steps),\n TargetDQNTrainer],metrics=[AvgEpisodeRewardMetric(experience_cls=ExperienceFirstLast,always_extend=True)])\nlearner.fit(47,lr=0.0001,wd=0)",
"_____no_output_____"
],
[
"# hide\nfrom nbdev.export import *\nfrom nbdev.export2html import *\nnotebook2script()\nnotebook2html()",
"Converted 00_core.ipynb.\nConverted 01_wrappers.ipynb.\nConverted 03_basic_agents.ipynb.\nConverted 04_learner.ipynb.\nConverted 05a_ptan_extend.ipynb.\nConverted 05b_data.ipynb.\nConverted 05c_async_data.ipynb.\nConverted 13_metrics.ipynb.\nConverted 14a_actorcritic.sac.ipynb.\nConverted 14b_actorcritic.diayn.ipynb.\nConverted 15_actorcritic.a3c_data.ipynb.\nConverted 16_actorcritic.a2c.ipynb.\nConverted 17_actorcritc.v1.dads.ipynb.\nConverted 18_policy_gradient.ppo.ipynb.\nConverted 19_policy_gradient.trpo.ipynb.\nConverted 20a_qlearning.dqn.ipynb.\nConverted 20b_qlearning.dqn_n_step.ipynb.\nConverted 20c_qlearning.dqn_target.ipynb.\nConverted 20d_qlearning.dqn_double.ipynb.\nConverted 20e_qlearning.dqn_noisy.ipynb.\nConverted index.ipynb.\nConverted notes.ipynb.\n"
]
]
] |
[
"code",
"markdown",
"code"
] |
[
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
]
] |
4a65b23709755140046676b4f042e2c366311efb
| 294,373 |
ipynb
|
Jupyter Notebook
|
3. SMS Spam Collection by Using LGBM Classifier.ipynb
|
MalikAfuHamid/SMS-Spam-Collection
|
88948ea106c4ac5531e5e8a35d67f08933fa37df
|
[
"Unlicense"
] | null | null | null |
3. SMS Spam Collection by Using LGBM Classifier.ipynb
|
MalikAfuHamid/SMS-Spam-Collection
|
88948ea106c4ac5531e5e8a35d67f08933fa37df
|
[
"Unlicense"
] | null | null | null |
3. SMS Spam Collection by Using LGBM Classifier.ipynb
|
MalikAfuHamid/SMS-Spam-Collection
|
88948ea106c4ac5531e5e8a35d67f08933fa37df
|
[
"Unlicense"
] | null | null | null | 101.85917 | 29,648 | 0.749355 |
[
[
[
"# 1. Import Libraries",
"_____no_output_____"
]
],
[
[
"import numpy as np\nimport pandas as pd\n\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.pipeline import Pipeline\nfrom sklearn.compose import ColumnTransformer\n\nfrom jcopml.pipeline import num_pipe, cat_pipe\nfrom jcopml.utils import save_model, load_model\nfrom jcopml.plot import plot_missing_value\nfrom jcopml.feature_importance import mean_score_decrease\n\nfrom luwiji.text_proc import illustration\nfrom jcopml.plot import plot_missing_value\nfrom jcopml.plot import plot_confusion_matrix\nfrom jcopml.plot import plot_roc_curve\nfrom jcopml.plot import plot_classification_report\nfrom jcopml.plot import plot_pr_curve\n\nfrom nltk.tokenize import word_tokenize\nfrom nltk.corpus import stopwords\nfrom string import punctuation\n\nsw_eng = stopwords.words('english') \nimport string\n\nimport seaborn as sns",
"_____no_output_____"
]
],
[
[
"# 2. Import Dataset",
"_____no_output_____"
]
],
[
[
"df = [line.rstrip() for line in open('SMSSpamCollection')]\ndf",
"_____no_output_____"
],
[
"df = pd.read_csv('SMSSpamCollection', names=['label','message'], sep='\\t')\ndf.head()",
"_____no_output_____"
],
[
"df.label = df.label.replace({'ham': 0 ,'spam': 1})",
"_____no_output_____"
],
[
"df",
"_____no_output_____"
]
],
[
[
"# 3. Exploratory Data Analysis (EDA)",
"_____no_output_____"
],
[
"### Data Information",
"_____no_output_____"
]
],
[
[
"df.info()",
"<class 'pandas.core.frame.DataFrame'>\nRangeIndex: 5572 entries, 0 to 5571\nData columns (total 2 columns):\n # Column Non-Null Count Dtype \n--- ------ -------------- ----- \n 0 label 5572 non-null int64 \n 1 message 5572 non-null object\ndtypes: int64(1), object(1)\nmemory usage: 87.2+ KB\n"
],
[
"df.shape",
"_____no_output_____"
],
[
"plot_missing_value(df)",
"_____no_output_____"
]
],
[
[
"### Data Description",
"_____no_output_____"
]
],
[
[
"df.head()",
"_____no_output_____"
],
[
"df.describe()",
"_____no_output_____"
],
[
"df.groupby('label').describe()",
"_____no_output_____"
]
],
[
[
"### Count a Character in Text Message",
"_____no_output_____"
]
],
[
[
"df['Length_Char'] = df['message'].apply(len)\ndf.head()",
"_____no_output_____"
],
[
"# Visualize a length of Character in text message\n\ndf['Length_Char'].plot.hist(bins = 150, edgecolor='black')",
"_____no_output_____"
],
[
"df.Length_Char.describe()",
"_____no_output_____"
],
[
"#Grab the maximum character in text message\n\ndf[df['Length_Char'] == 910]['message'].iloc[0]",
"_____no_output_____"
]
],
[
[
"### Visualize Label Distribution",
"_____no_output_____"
]
],
[
[
"df.hist(column='Length_Char', by='label', bins =60, figsize=(12,4), edgecolor='black', color = 'red')",
"_____no_output_____"
]
],
[
[
"## Cleaning Dataset",
"_____no_output_____"
]
],
[
[
"df.message",
"_____no_output_____"
],
[
"import re",
"_____no_output_____"
],
[
"def clean_data(text):\n text = text.lower()\n clean_word = word_tokenize(text)\n clean_word = [word for word in clean_word if word not in punctuation]\n clean_word = [word for word in clean_word if len(word) > 1 and word.isalpha()]\n clean_word = [word for word in clean_word if word not in sw_eng]\n emoji_removal = re.compile(\"[\"\n u\"\\U0001F600-\\U0001F64F\" # emoticons\n u\"\\U0001F300-\\U0001F5FF\" # symbols & pictographs\n u\"\\U0001F680-\\U0001F6FF\" # transport & map symbols\n u\"\\U0001F1E0-\\U0001F1FF\" # flags (iOS)\n \"]+\", flags=re.UNICODE)\n clean_word = ' '.join(clean_word)\n return emoji_removal.sub(r'', clean_word)",
"_____no_output_____"
],
[
"df.message = df.message.apply(clean_data)\ndf.message",
"_____no_output_____"
]
],
[
[
"## Check Imbalanced Dataset",
"_____no_output_____"
]
],
[
[
"sns.countplot(df.label)",
"C:\\Users\\Malik Afu Hamid\\anaconda3\\lib\\site-packages\\seaborn\\_decorators.py:36: FutureWarning: Pass the following variable as a keyword arg: x. From version 0.12, the only valid positional argument will be `data`, and passing other arguments without an explicit keyword will result in an error or misinterpretation.\n warnings.warn(\n"
],
[
"df.label.value_counts()",
"_____no_output_____"
]
],
[
[
"# 4. Dataset Splitting",
"_____no_output_____"
]
],
[
[
"X = df.message\ny = df.label\n\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, stratify=y, random_state=42)\nX_train.shape, X_test.shape, y_train.shape, y_test.shape",
"_____no_output_____"
]
],
[
[
"# 5. Modeling",
"_____no_output_____"
]
],
[
[
"from sklearn.feature_extraction.text import TfidfVectorizer\nfrom lightgbm import LGBMClassifier\nfrom sklearn.model_selection import RandomizedSearchCV\nfrom jcopml.tuning import random_search_params as rsp",
"_____no_output_____"
],
[
"from jcopml.tuning.space import Integer, Real",
"_____no_output_____"
],
[
"pipeline = Pipeline([\n ('prep', TfidfVectorizer()),\n ('algo', LGBMClassifier(n_jobs=-1, random_state=42))\n])\n\nparameter = {'algo__num_leaves': Integer(low=60, high=300),\n 'algo__max_depth': Integer(low=1, high=10),\n 'algo__learning_rate': Real(low=-2, high=0, prior='log-uniform'),\n 'algo__n_estimators': Integer(low=150, high=300),\n 'algo__min_child_samples': Integer(low=10, high=40),\n 'algo__min_child_weight': Real(low=-3, high=-2, prior='log-uniform'),\n 'algo__subsample': Real(low=0.3, high=0.8, prior='uniform'),\n 'algo__colsample_bytree': Real(low=0.1, high=1, prior='uniform'),\n 'algo__reg_alpha': Real(low=-1, high=1, prior='log-uniform'),\n 'algo__reg_lambda': Real(low=-3, high=1, prior='log-uniform'),\n}\n\n\nmodel = RandomizedSearchCV(pipeline, parameter, cv=3, n_iter= 100, n_jobs=-2, verbose=1, random_state=42)\nmodel.fit(X_train, y_train)\n\nprint(model.best_params_)\nprint(model.score(X_train, y_train), model.best_score_, model.score(X_test, y_test))",
"Fitting 3 folds for each of 100 candidates, totalling 300 fits\n"
]
],
[
[
"# 6. Hyperparameters Tuning",
"_____no_output_____"
]
],
[
[
"pipeline = Pipeline([\n ('prep', TfidfVectorizer()),\n ('algo', LGBMClassifier(n_jobs=-1, random_state=42))\n])\n\nparameter = {'algo__num_leaves': Integer(low=60, high=300),\n 'algo__max_depth': Integer(low=1, high=10),\n 'algo__learning_rate': Real(low=-2, high=0, prior='log-uniform'),\n 'algo__n_estimators': Integer(low=150, high=300),\n 'algo__min_child_samples': Integer(low=10, high=40),\n 'algo__min_child_weight': Real(low=-3, high=-2, prior='log-uniform'),\n 'algo__subsample': Real(low=0.3, high=0.8, prior='uniform'),\n 'algo__colsample_bytree': Real(low=0.1, high=1, prior='uniform'),\n 'algo__reg_alpha': Real(low=-1, high=1, prior='log-uniform'),\n 'algo__reg_lambda': Real(low=-2, high=1, prior='log-uniform'),\n}\n\n\nmodel = RandomizedSearchCV(pipeline, parameter, cv=3, n_iter= 100, n_jobs=-2, verbose=1, random_state=42)\nmodel.fit(X_train, y_train)\n\nprint(model.best_params_)\nprint(model.score(X_train, y_train), model.best_score_, model.score(X_test, y_test))",
"Fitting 3 folds for each of 100 candidates, totalling 300 fits\n"
]
],
[
[
"# 7. Evaluation",
"_____no_output_____"
],
[
"## 7.1. Classification Report",
"_____no_output_____"
]
],
[
[
"plot_classification_report(X_train, y_train, X_test, y_test, model)",
"_____no_output_____"
],
[
"plot_classification_report(X_train, y_train, X_test, y_test, model, report=True)",
"Train report\n precision recall f1-score support\n\n 0 0.99 1.00 0.99 3859\n 1 1.00 0.93 0.96 598\n\n accuracy 0.99 4457\n macro avg 0.99 0.96 0.98 4457\nweighted avg 0.99 0.99 0.99 4457\n\n\nTest report\n precision recall f1-score support\n\n 0 0.98 1.00 0.99 966\n 1 0.98 0.85 0.91 149\n\n accuracy 0.98 1115\n macro avg 0.98 0.92 0.95 1115\nweighted avg 0.98 0.98 0.98 1115\n\n"
]
],
[
[
"## 7.2. Confusion Matrix",
"_____no_output_____"
]
],
[
[
"plot_confusion_matrix(X_train, y_train, X_test, y_test, model)",
"_____no_output_____"
]
],
[
[
"## 7.3. ROC AUC Curve",
"_____no_output_____"
]
],
[
[
"plot_roc_curve(X_train, y_train, X_test, y_test, model)",
"_____no_output_____"
]
],
[
[
"## 7.4. Precision-Recall Curve",
"_____no_output_____"
]
],
[
[
"plot_pr_curve(X_train, y_train, X_test, y_test, model)",
"_____no_output_____"
]
],
[
[
"### Result Analysis",
"_____no_output_____"
]
],
[
[
"df_analysis = pd.DataFrame(X_test)\ndf_analysis['Prediction'] = model.predict(X_test)\ndf_analysis['Actual'] = y_test\ndf_analysis",
"_____no_output_____"
],
[
"df_analysis[(df_analysis['Prediction'] == 0) & (df_analysis['Actual'] == 1)] ",
"_____no_output_____"
],
[
"df_analysis[(df_analysis['Prediction'] == 1) & (df_analysis['Actual'] == 0)] ",
"_____no_output_____"
]
],
[
[
"# Save Model",
"_____no_output_____"
]
],
[
[
"save_model(model.best_estimator_, 'SMS_Spam_Classifier_LGBM_Classifier.pkl')",
"Model is pickled as model/SMS_Spam_Classifier_LGBM_Classifier.pkl\n"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a65b39abc7609a6fd32e63c4dd4b3d105261518
| 6,704 |
ipynb
|
Jupyter Notebook
|
Programming_Assingment17.ipynb
|
krishnasoft002/Python-Programming-Basic-Assignment
|
4ae931f70974f9edd0cda56e484a3a2df0a264c9
|
[
"CNRI-Python"
] | 1 |
2021-11-21T08:21:14.000Z
|
2021-11-21T08:21:14.000Z
|
Programming_Assingment17.ipynb
|
akashdeep364/Python-Programming-Basic-Assignment
|
3258e8e8c76d43d72a233ccfba4e5252377e1e5f
|
[
"CNRI-Python"
] | null | null | null |
Programming_Assingment17.ipynb
|
akashdeep364/Python-Programming-Basic-Assignment
|
3258e8e8c76d43d72a233ccfba4e5252377e1e5f
|
[
"CNRI-Python"
] | null | null | null | 23.358885 | 99 | 0.476581 |
[
[
[
"# Programming_Assingment17",
"_____no_output_____"
]
],
[
[
"Question1. \nCreate a function that takes three arguments a, b, c and returns the sum of the\nnumbers that are evenly divided by c from the range a, b inclusive.\nExamples\nevenly_divisible(1, 10, 20) ➞ 0\n# No number between 1 and 10 can be evenly divided by 20.\nevenly_divisible(1, 10, 2) ➞ 30\n# 2 + 4 + 6 + 8 + 10 = 30\nevenly_divisible(1, 10, 3) ➞ 18\n# 3 + 6 + 9 = 18\n\n",
"_____no_output_____"
],
[
"def sumDivisibles(a, b, c): \n sum = 0\n for i in range(a, b + 1): \n if (i % c == 0):\n sum += i \n return sum\na = int(input('Enter a : '))\nb = int(input('Enter b : '))\nc = int(input('Enter c : '))\nprint(sumDivisibles(a, b, c))",
"Enter a : 1\nEnter b : 10\nEnter c : 3\n18\n"
]
],
[
[
"### Question2. \n Create a function that returns True if a given inequality expression is correct and\n False otherwise.\n Examples\n correct_signs(\"3 > 7 < 11\") ➞ True\n correct_signs(\"13 > 44 > 33 > 1\") ➞ False\n correct_signs(\"1 < 2 < 6 < 9 > 3\") ➞ True\n\n",
"_____no_output_____"
]
],
[
[
"def correct_signs ( txt ) : \n return eval ( txt )\nprint(correct_signs(\"3 > 7 < 11\"))\nprint(correct_signs(\"13 > 44 > 33 > 1\"))\nprint(correct_signs(\"1 < 2 < 6 < 9 > 3\"))",
"False\nFalse\nTrue\n"
]
],
[
[
"### Question3. \n Create a function that replaces all the vowels in a string with a specified character.\n Examples\n replace_vowels('the aardvark, '#') ➞ 'th# ##rdv#rk'\n replace_vowels('minnie mouse', '?') ➞ 'm?nn?? m??s?'\n replace_vowels('shakespeare', '*') ➞ 'sh*k*sp**r*'\n\n",
"_____no_output_____"
]
],
[
[
"def replace_vowels(str, s):\n vowels = 'AEIOUaeiou'\n for ele in vowels: \n str = str.replace(ele, s) \n return str\n \ninput_str = input(\"enter a string : \")\ns = input(\"enter a vowel replacing string : \")\nprint(\"\\nGiven Sting:\", input_str)\nprint(\"Given Specified Character:\", s)\nprint(\"Afer replacing vowels with the specified character:\",replace_vowels(input_str, s))\n",
"enter a string : akash\nenter a vowel replacing string : @\n\nGiven Sting: akash\nGiven Specified Character: @\nAfer replacing vowels with the specified character: @k@sh\n"
]
],
[
[
"### Question4. \n Write a function that calculates the factorial of a number recursively.\n Examples\n factorial(5) ➞ 120\n factorial(3) ➞ 6\n factorial(1) ➞ 1\n factorial(0) ➞ 1\n\n",
"_____no_output_____"
]
],
[
[
"def factorial(n): \n if n == 0:\n return 1 \n return n * factorial(n-1)\n\nnum = int(input('enter a number :'))\nprint(\"Factorial of\", num, \"is\", factorial(num))",
"enter a number :5\nFactorial of 5 is 120\n"
]
],
[
[
"### Question 5\n Hamming distance is the number of characters that differ between two strings.\n To illustrate:\n String1: 'abcbba'\n String2: 'abcbda'\n Hamming Distance: 1 - 'b' vs. 'd' is the only difference.\n Create a function that computes the hamming distance between two strings.\n Examples\n hamming_distance('abcde', 'bcdef') ➞ 5\n hamming_distance('abcde', 'abcde') ➞ 0\n hamming_distance('strong', 'strung') ➞ 1",
"_____no_output_____"
]
],
[
[
"def hamming_distance(str1, str2):\n i = 0\n count = 0\n \n while(i < len(str1)):\n if(str1[i] != str2[i]):\n count += 1\n i += 1\n return count\n \n# Driver code \nstr1 = \"abcde\"\nstr2 = \"bcdef\"\n \n# function call\nprint(hamming_distance(str1, str2))",
"5\n"
],
[
"print(hamming_distance('strong', 'strung'))",
"1\n"
],
[
"hamming_distance('abcde', 'abcde')",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
]
] |
4a65b80e0fdd7e43320eee17a4d952055db7a1ed
| 2,457 |
ipynb
|
Jupyter Notebook
|
py-scientometrics.ipynb
|
challenge19/py-scientomerics-ipynb
|
0d1d4e69024cd770196319e0575d92396d02e7ca
|
[
"MIT"
] | null | null | null |
py-scientometrics.ipynb
|
challenge19/py-scientomerics-ipynb
|
0d1d4e69024cd770196319e0575d92396d02e7ca
|
[
"MIT"
] | null | null | null |
py-scientometrics.ipynb
|
challenge19/py-scientomerics-ipynb
|
0d1d4e69024cd770196319e0575d92396d02e7ca
|
[
"MIT"
] | 1 |
2021-12-25T19:01:53.000Z
|
2021-12-25T19:01:53.000Z
| 24.088235 | 142 | 0.58893 |
[
[
[
"利用Python进行科学计量分析和可视化\n===\n\n**该notebook将介绍如何利用python及相应得程序包进行科学计量分析和可视化**\n## 基础准备\n\n### 科学计量学\n\n### 数据分析和可视化包\n主要涉及的程序根据其主要功能可分为**数据分析**和**可视化**,但某些会同时包含这两个功能。\n\n#### 数据分析\n- numpy:基于矩阵(数组)的科学计算包\n- scipy:科学计算包\n- statsmodels:统计分析包\n- pandas:数据结构和分析包,提供方便的基于DataFrame的数据操作\n- sklearn:数据分析和机器学习包\n- [metaknowledge](http://networkslab.org/metaknowledge/):文献计量分析包,可直接读取WoS下载的原始文件并解析\n- networkx:网络分析包,提供常见的社会网络分析;与之类似的还有igraph\n\n#### 可视化\n- matplotlib:python下的基本绘图包,许多其它绘图包都基于此\n- seaborn:结合pandas的统计可视化包,提供方便的统计分析绘图\n- plotly:提供方便的交互图表,对科学绘图也比较友好\n- bokeh:基于D3的绘图工具,提供交互式图表,可定制dashboard\n- pyecharts:提供基于echarts的交互图表绘图",
"_____no_output_____"
],
[
"## 科学计量理论研究",
"_____no_output_____"
],
[
"### 期刊论文引用数的分布\n- [对数正态分布是Citation Success Index产生的原因](https://github.com/challenge19/py-scientometrics-ipynb/blob/master/notebooks/Reason_of_CSI.ipynb)",
"_____no_output_____"
],
[
"## 在线开放数据的获取与分析\n- [altmetrics](https://github.com/challenge19/py-scientometrics-ipynb/blob/master/notebooks/altmetrics.ipynb)\n- [Crossref](https://github.com/challenge19/py-scientometrics-ipynb/blob/master/notebooks/Examples%20with%20Crossref.ipynb)",
"_____no_output_____"
]
]
] |
[
"markdown"
] |
[
[
"markdown",
"markdown",
"markdown",
"markdown"
]
] |
4a65c0e6e60ad128a67a08664eb84aaba09feec2
| 37,367 |
ipynb
|
Jupyter Notebook
|
deeplearning1/nbs/mercedes/code/mercedes-benz-greener-manufacturing-base.ipynb
|
krohitm/fast_ai
|
f6f0adcf550288e5a92540151ada87d5fee56c20
|
[
"Apache-2.0"
] | null | null | null |
deeplearning1/nbs/mercedes/code/mercedes-benz-greener-manufacturing-base.ipynb
|
krohitm/fast_ai
|
f6f0adcf550288e5a92540151ada87d5fee56c20
|
[
"Apache-2.0"
] | null | null | null |
deeplearning1/nbs/mercedes/code/mercedes-benz-greener-manufacturing-base.ipynb
|
krohitm/fast_ai
|
f6f0adcf550288e5a92540151ada87d5fee56c20
|
[
"Apache-2.0"
] | null | null | null | 32.951499 | 220 | 0.544224 |
[
[
[
"## Importing necessary packages",
"_____no_output_____"
]
],
[
[
"import numpy as np # linear algebra\nimport pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)\n\n# preprocessing/decomposition\nfrom sklearn.preprocessing import LabelEncoder, StandardScaler, OneHotEncoder\nfrom sklearn.decomposition import PCA, FastICA, FactorAnalysis, KernelPCA\n\n# keras \nfrom keras.models import Sequential, load_model\nfrom keras.layers import Dense, Dropout, BatchNormalization, Activation\nfrom keras.wrappers.scikit_learn import KerasRegressor\nfrom keras.callbacks import EarlyStopping, ModelCheckpoint\n\n# model evaluation\nfrom sklearn.model_selection import cross_val_score, KFold, train_test_split\nfrom sklearn.metrics import r2_score, mean_squared_error\n\n# supportive models\nfrom sklearn.ensemble import GradientBoostingRegressor\n# feature selection (from supportive model)\nfrom sklearn.feature_selection import SelectFromModel\n\n# to make results reproducible\nseed = 42 # was 42",
"_____no_output_____"
],
[
"# Read datasets\ntrain = pd.read_csv('../input/train.csv')\ntest = pd.read_csv('../input/test.csv')",
"_____no_output_____"
],
[
"# save IDs for submission\nid_test = test['ID'].copy()",
"_____no_output_____"
],
[
"# glue datasets together\ntotal = pd.concat([train, test], axis=0)\nprint('initial shape: {}'.format(total.shape))\n\n# binary indexes for train/test set split\nis_train = ~total.y.isnull()",
"initial shape: (8418, 378)\n"
],
[
"# find all categorical features\ncf = total.select_dtypes(include=['object']).columns\nprint total[cf].head()",
" X0 X1 X2 X3 X4 X5 X6 X8\n0 k v at a d u j o\n1 k t av e d y l o\n2 az w n c d x j x\n3 az t n f d x l e\n4 az v n f d h d n\n"
],
[
"# make one-hot-encoding convenient way - pandas.get_dummies(df) function\ndummies = pd.get_dummies(\n total[cf],\n drop_first=False # you can set it = True to ommit multicollinearity (crucial for linear models)\n)\n\nprint('oh-encoded shape: {}'.format(dummies.shape))\nprint dummies.head()",
"oh-encoded shape: (8418, 211)\n X0_a X0_aa X0_ab X0_ac X0_ad X0_ae X0_af X0_ag X0_ai X0_aj ... \\\n0 0 0 0 0 0 0 0 0 0 0 ... \n1 0 0 0 0 0 0 0 0 0 0 ... \n2 0 0 0 0 0 0 0 0 0 0 ... \n3 0 0 0 0 0 0 0 0 0 0 ... \n4 0 0 0 0 0 0 0 0 0 0 ... \n\n X8_p X8_q X8_r X8_s X8_t X8_u X8_v X8_w X8_x X8_y \n0 0 0 0 0 0 0 0 0 0 0 \n1 0 0 0 0 0 0 0 0 0 0 \n2 0 0 0 0 0 0 0 0 1 0 \n3 0 0 0 0 0 0 0 0 0 0 \n4 0 0 0 0 0 0 0 0 0 0 \n\n[5 rows x 211 columns]\n"
],
[
"# get rid of old columns and append them encoded\ntotal = pd.concat(\n [\n total.drop(cf, axis=1), # drop old\n dummies # append them one-hot-encoded\n ],\n axis=1 # column-wise\n)\n\nprint('appended-encoded shape: {}'.format(total.shape))",
"appended-encoded shape: (8418, 581)\n"
],
[
"# recreate train/test again, now with dropped ID column\ntrain, test = total[is_train].drop(['ID'], axis=1), total[~is_train].drop(['ID', 'y'], axis=1)\n\n# drop redundant objects\ndel total\n\n# check shape\nprint('\\nTrain shape: {}\\nTest shape: {}'.format(train.shape, test.shape))",
"\nTrain shape: (4209, 580)\nTest shape: (4209, 579)\n"
],
[
"# Calculate additional features: dimensionality reduction components\nn_comp=10 # was 10\n\n# uncomment to scale data before applying decompositions\n# however, all features are binary (in [0,1] interval), i don't know if it's worth something\ntrain_scaled = train.drop('y', axis=1).copy()\ntest_scaled = test.copy()\n'''\nss = StandardScaler()\nss.fit(train.drop('y', axis=1))\n\ntrain_scaled = ss.transform(train.drop('y', axis=1).copy())\ntest_scaled = ss.transform(test.copy())\n'''\n\n# PCA\npca = PCA(n_components=n_comp, random_state=seed)\npca2_results_train = pca.fit_transform(train_scaled)\npca2_results_test = pca.transform(test_scaled)\n\n# ICA\nica = FastICA(n_components=n_comp, random_state=42)\nica2_results_train = ica.fit_transform(train_scaled)\nica2_results_test = ica.transform(test_scaled)\n\n# Factor Analysis\nfca = FactorAnalysis(n_components=n_comp, random_state=seed)\nfca2_results_train = fca.fit_transform(train_scaled)\nfca2_results_test = fca.transform(test_scaled)\n\n# Append it to dataframes\nfor i in range(1, n_comp+1):\n train['pca_' + str(i)] = pca2_results_train[:,i-1]\n test['pca_' + str(i)] = pca2_results_test[:, i-1]\n \n train['ica_' + str(i)] = ica2_results_train[:,i-1]\n test['ica_' + str(i)] = ica2_results_test[:, i-1]\n \n #train['fca_' + str(i)] = fca2_results_train[:,i-1]\n #test['fca_' + str(i)] = fca2_results_test[:, i-1]\n ",
"/home/krohitm/anaconda2/envs/krohitm/lib/python2.7/site-packages/sklearn/decomposition/fastica_.py:116: UserWarning: FastICA did not converge. Consider increasing tolerance or the maximum number of iterations.\n warnings.warn('FastICA did not converge. Consider increasing '\n"
],
[
"# create augmentation by feature importances as additional features\nt = train['y']\ntr = train.drop(['y'], axis=1)\n\n# Tree-based estimators can be used to compute feature importances\nclf = GradientBoostingRegressor(\n max_depth=4, \n learning_rate=0.005, \n random_state=seed, \n subsample=0.95, \n n_estimators=200\n)\n\n# fit regressor\nclf.fit(tr, t)\n\n# df to hold feature importances\nfeatures = pd.DataFrame()\nfeatures['feature'] = tr.columns\nfeatures['importance'] = clf.feature_importances_\nfeatures.sort_values(by=['importance'], ascending=True, inplace=True)\nfeatures.set_index('feature', inplace=True)\n\nprint features.tail()\n\n# select best features\nmodel = SelectFromModel(clf, prefit=True)\ntrain_reduced = model.transform(tr)\n\ntest_reduced = model.transform(test.copy())\n\n# dataset augmentation\ntrain = pd.concat([train, pd.DataFrame(train_reduced)], axis=1)\ntest = pd.concat([test, pd.DataFrame(test_reduced)], axis=1)\n\n# check new shape\nprint('\\nTrain shape: {}\\nTest shape: {}'.format(train.shape, test.shape))",
" importance\nfeature \nX119 0.041776\nX118 0.044236\npca_6 0.091539\nX315 0.128361\nX314 0.610175\n\nTrain shape: (4209, 617)\nTest shape: (4209, 616)\n"
],
[
"# define custom R2 metrics for Keras backend\nfrom keras import backend as K\n\ndef r2_keras(y_true, y_pred):\n SS_res = K.sum(K.square( y_true - y_pred )) \n SS_tot = K.sum(K.square( y_true - K.mean(y_true) ) ) \n return ( 1 - SS_res/(SS_tot + K.epsilon()) )",
"_____no_output_____"
],
[
"from keras.optimizers import SGD, Adagrad, Adadelta\n# base model architecture definition\ndef model():\n model = Sequential()\n #input layer\n model.add(Dense(input_dims, input_dim=input_dims))\n model.add(BatchNormalization())\n model.add(Activation('tanh'))\n model.add(Dropout(0.3))\n # hidden layers\n model.add(Dense(input_dims))\n model.add(BatchNormalization())\n model.add(Activation(act_func))\n model.add(Dropout(0.3))\n \n model.add(Dense(input_dims//2))\n model.add(BatchNormalization())\n model.add(Activation(act_func))\n model.add(Dropout(0.3))\n \n model.add(Dense(input_dims//4, activation=act_func))\n \n # output layer (y_pred)\n model.add(Dense(1, activation='linear'))\n \n # compile this model\n model.compile(loss='mean_squared_error', # one may use 'mean_absolute_error' as alternative\n #optimizer='adam',\n optimizer=Adadelta(),#SGD(lr=0.0001, momentum=0.9),\n #optimizer=Adagrad(),\n metrics=[r2_keras] # you can add several if needed\n )\n \n # Visualize NN architecture\n print(model.summary())\n return model",
"_____no_output_____"
],
[
"# initialize input dimension\ninput_dims = train.shape[1]-1\n\n#activation functions for hidden layers\nact_func = 'tanh' # could be 'relu', 'sigmoid', ...\n\n# make np.seed fixed\nnp.random.seed(seed)\n\n# initialize estimator, wrap model in KerasRegressor\nestimator = KerasRegressor(\n build_fn=model, \n nb_epoch=100, \n batch_size=10,\n verbose=1\n)",
"_____no_output_____"
],
[
"# X, y preparation\nX, y = train.drop('y', axis=1).values, train.y.values\nprint(X.shape)\n\n# X_test preparation\nX_test = test\nprint(X_test.shape)\n\n# train/validation split\nX_tr, X_val, y_tr, y_val = train_test_split(\n X, \n y, \n test_size=0.20, \n random_state=seed\n)",
"(4209, 616)\n(4209, 616)\n"
],
[
"# define path to save model\nimport os\nmodel_path = 'keras_model_adadelta.h5'\n\n# prepare callbacks\ncallbacks = [\n EarlyStopping(\n monitor='val_loss', \n patience=10, # was 10\n verbose=1),\n \n ModelCheckpoint(\n model_path, \n monitor='val_loss', \n save_best_only=True, \n verbose=0)\n]\n\n# fit estimator\nestimator.fit(\n X_tr, \n y_tr, \n #nb_epoch=10, # increase it to 20-100 to get better results\n validation_data=(X_val, y_val),\n verbose=2,\n callbacks=callbacks,\n shuffle=True\n)",
"____________________________________________________________________________________________________\nLayer (type) Output Shape Param # Connected to \n====================================================================================================\ndense_6 (Dense) (None, 616) 380072 dense_input_2[0][0] \n____________________________________________________________________________________________________\nbatchnormalization_4 (BatchNorma (None, 616) 2464 dense_6[0][0] \n____________________________________________________________________________________________________\nactivation_4 (Activation) (None, 616) 0 batchnormalization_4[0][0] \n____________________________________________________________________________________________________\ndropout_4 (Dropout) (None, 616) 0 activation_4[0][0] \n____________________________________________________________________________________________________\ndense_7 (Dense) (None, 616) 380072 dropout_4[0][0] \n____________________________________________________________________________________________________\nbatchnormalization_5 (BatchNorma (None, 616) 2464 dense_7[0][0] \n____________________________________________________________________________________________________\nactivation_5 (Activation) (None, 616) 0 batchnormalization_5[0][0] \n____________________________________________________________________________________________________\ndropout_5 (Dropout) (None, 616) 0 activation_5[0][0] \n____________________________________________________________________________________________________\ndense_8 (Dense) (None, 308) 190036 dropout_5[0][0] \n____________________________________________________________________________________________________\nbatchnormalization_6 (BatchNorma (None, 308) 1232 dense_8[0][0] \n____________________________________________________________________________________________________\nactivation_6 (Activation) (None, 308) 0 batchnormalization_6[0][0] \n____________________________________________________________________________________________________\ndropout_6 (Dropout) (None, 308) 0 activation_6[0][0] \n____________________________________________________________________________________________________\ndense_9 (Dense) (None, 154) 47586 dropout_6[0][0] \n____________________________________________________________________________________________________\ndense_10 (Dense) (None, 1) 155 dense_9[0][0] \n====================================================================================================\nTotal params: 1,004,081\nTrainable params: 1,001,001\nNon-trainable params: 3,080\n____________________________________________________________________________________________________\nNone\nTrain on 3367 samples, validate on 842 samples\nEpoch 1/100\n1s - loss: 6394.0030 - r2_keras: -6.1273e+01 - val_loss: 1304.3339 - val_r2_keras: -1.0349e+01\nEpoch 2/100\n1s - loss: 503.2539 - r2_keras: -3.0927e+00 - val_loss: 168.5173 - val_r2_keras: -1.8353e-01\nEpoch 3/100\n1s - loss: 147.3422 - r2_keras: -6.6220e-02 - val_loss: 125.6487 - val_r2_keras: 0.0830\nEpoch 4/100\n1s - loss: 126.9811 - r2_keras: 0.1120 - val_loss: 95.1535 - val_r2_keras: 0.3354\nEpoch 5/100\n1s - loss: 106.3876 - r2_keras: 0.2417 - val_loss: 77.4507 - val_r2_keras: 0.4740\nEpoch 6/100\n1s - loss: 91.3277 - r2_keras: 0.3901 - val_loss: 70.5071 - val_r2_keras: 0.5260\nEpoch 7/100\n1s - loss: 84.9132 - r2_keras: 0.4459 - val_loss: 66.6179 - val_r2_keras: 0.5463\nEpoch 8/100\n1s - loss: 82.4771 - r2_keras: 0.4541 - val_loss: 64.6251 - val_r2_keras: 0.5578\nEpoch 
9/100\n1s - loss: 78.3884 - r2_keras: 0.5041 - val_loss: 65.0006 - val_r2_keras: 0.5527\nEpoch 10/100\n1s - loss: 76.8051 - r2_keras: 0.4928 - val_loss: 63.2065 - val_r2_keras: 0.5782\nEpoch 11/100\n1s - loss: 75.2029 - r2_keras: 0.4959 - val_loss: 63.2919 - val_r2_keras: 0.5742\nEpoch 12/100\n1s - loss: 73.0743 - r2_keras: 0.4689 - val_loss: 62.9828 - val_r2_keras: 0.5764\nEpoch 13/100\n1s - loss: 73.0625 - r2_keras: 0.5353 - val_loss: 63.6272 - val_r2_keras: 0.5739\nEpoch 14/100\n1s - loss: 72.3447 - r2_keras: 0.5252 - val_loss: 64.1477 - val_r2_keras: 0.5669\nEpoch 15/100\n1s - loss: 71.8971 - r2_keras: 0.5466 - val_loss: 63.1064 - val_r2_keras: 0.5770\nEpoch 16/100\n1s - loss: 71.5791 - r2_keras: 0.5418 - val_loss: 63.4811 - val_r2_keras: 0.5746\nEpoch 17/100\n1s - loss: 69.6723 - r2_keras: 0.5608 - val_loss: 63.9210 - val_r2_keras: 0.5707\nEpoch 18/100\n1s - loss: 70.0372 - r2_keras: 0.5592 - val_loss: 63.3586 - val_r2_keras: 0.5743\nEpoch 19/100\n1s - loss: 69.1003 - r2_keras: 0.5506 - val_loss: 63.8056 - val_r2_keras: 0.5775\nEpoch 20/100\n1s - loss: 71.4056 - r2_keras: 0.4669 - val_loss: 62.9273 - val_r2_keras: 0.5838\nEpoch 21/100\n1s - loss: 71.2448 - r2_keras: 0.5313 - val_loss: 63.9540 - val_r2_keras: 0.5708\nEpoch 22/100\n1s - loss: 69.2646 - r2_keras: 0.5557 - val_loss: 63.9673 - val_r2_keras: 0.5765\nEpoch 23/100\n1s - loss: 68.0074 - r2_keras: 0.5570 - val_loss: 63.7094 - val_r2_keras: 0.5773\nEpoch 24/100\n1s - loss: 68.3254 - r2_keras: 0.5258 - val_loss: 64.0435 - val_r2_keras: 0.5786\nEpoch 25/100\n1s - loss: 67.1165 - r2_keras: 0.5748 - val_loss: 62.7844 - val_r2_keras: 0.5779\nEpoch 26/100\n1s - loss: 65.9514 - r2_keras: 0.5655 - val_loss: 63.2709 - val_r2_keras: 0.5742\nEpoch 27/100\n1s - loss: 66.1189 - r2_keras: 0.5762 - val_loss: 63.3663 - val_r2_keras: 0.5744\nEpoch 28/100\n1s - loss: 66.3437 - r2_keras: 0.6044 - val_loss: 63.3348 - val_r2_keras: 0.5769\nEpoch 29/100\n1s - loss: 65.7923 - r2_keras: 0.5729 - val_loss: 63.6869 - val_r2_keras: 0.5764\nEpoch 30/100\n1s - loss: 65.0994 - r2_keras: 0.5690 - val_loss: 64.6346 - val_r2_keras: 0.5713\nEpoch 31/100\n1s - loss: 66.0020 - r2_keras: 0.5815 - val_loss: 65.0727 - val_r2_keras: 0.5648\nEpoch 32/100\n1s - loss: 64.7405 - r2_keras: 0.5915 - val_loss: 65.1075 - val_r2_keras: 0.5627\nEpoch 33/100\n1s - loss: 64.8265 - r2_keras: 0.5809 - val_loss: 65.7094 - val_r2_keras: 0.5666\nEpoch 34/100\n1s - loss: 65.1444 - r2_keras: 0.5663 - val_loss: 64.3614 - val_r2_keras: 0.5742\nEpoch 35/100\n1s - loss: 64.4480 - r2_keras: 0.5845 - val_loss: 66.5804 - val_r2_keras: 0.5526\nEpoch 36/100\n1s - loss: 65.1562 - r2_keras: 0.5623 - val_loss: 65.7460 - val_r2_keras: 0.5591\nEpoch 00035: early stopping\n"
],
[
"# if best iteration's model was saved then load and use it\nif os.path.isfile(model_path):\n estimator = load_model(model_path, custom_objects={'r2_keras': r2_keras})\n\n# check performance on train set\nprint('MSE train: {}'.format(mean_squared_error(y_tr, estimator.predict(X_tr))**0.5)) # mse train\nprint('R^2 train: {}'.format(r2_score(y_tr, estimator.predict(X_tr)))) # R^2 train\n\n# check performance on validation set\nprint('MSE val: {}'.format(mean_squared_error(y_val, estimator.predict(X_val))**0.5)) # mse val\nprint('R^2 val: {}'.format(r2_score(y_val, estimator.predict(X_val)))) # R^2 val\npass",
"MSE train: 7.80798918632\nR^2 train: 0.623668164219\nMSE val: 7.92366340175\nR^2 val: 0.596630478318\n"
]
],
[
[
"### Temporary check for results",
"_____no_output_____"
]
],
[
[
"# if best iteration's model was saved then load and use it\nif os.path.isfile(model_path):\n estimator = load_model(model_path, custom_objects={'r2_keras': r2_keras})\n\n# check performance on train set\nprint('MSE train: {}'.format(mean_squared_error(y_tr, estimator.predict(X_tr))**0.5)) # mse train\nprint('R^2 train: {}'.format(r2_score(y_tr, estimator.predict(X_tr)))) # R^2 train\n\n# check performance on validation set\nprint('MSE val: {}'.format(mean_squared_error(y_val, estimator.predict(X_val))**0.5)) # mse val\nprint('R^2 val: {}'.format(r2_score(y_val, estimator.predict(X_val)))) # R^2 val\npass",
"MSE train: 7.80798918632\nR^2 train: 0.623668164219\nMSE val: 7.92366340175\nR^2 val: 0.596630478318\n"
],
[
"# predict results\nres = estimator.predict(X_test.values).ravel()\n\n# create df and convert it to csv\noutput = pd.DataFrame({'id': id_test, 'y': res})\noutput.to_csv('../results/adadelta.csv', index=False)",
"_____no_output_____"
],
[
"estimator.predict(X_tr).ravel()",
"_____no_output_____"
]
],
[
[
"# Trying another method",
"_____no_output_____"
]
],
[
[
"import numpy as np # linear algebra\nimport pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)\nfrom sklearn.preprocessing import LabelEncoder",
"_____no_output_____"
],
[
"# read datasets\ntrain = pd.read_csv('../input/train.csv')\ntest = pd.read_csv('../input/test.csv')\n\n# process columns, apply LabelEncoder to categorical features\nfor c in train.columns:\n if train[c].dtype == 'object':\n lbl = LabelEncoder() \n lbl.fit(list(train[c].values) + list(test[c].values)) \n train[c] = lbl.transform(list(train[c].values))\n test[c] = lbl.transform(list(test[c].values))\n\n# shape \nprint('Shape train: {}\\nShape test: {}'.format(train.shape, test.shape))",
"Shape train: (4209, 378)\nShape test: (4209, 377)\n"
],
[
"from sklearn.decomposition import PCA, FastICA\nn_comp = 10\n\n# PCA\npca = PCA(n_components=n_comp, random_state=42)\npca2_results_train = pca.fit_transform(train.drop([\"y\"], axis=1))\npca2_results_test = pca.transform(test)\n\n# ICA\nica = FastICA(n_components=n_comp, random_state=42)\nica2_results_train = ica.fit_transform(train.drop([\"y\"], axis=1))\nica2_results_test = ica.transform(test)\n\n# Append decomposition components to datasets\nfor i in range(1, n_comp+1):\n train['pca_' + str(i)] = pca2_results_train[:,i-1]\n test['pca_' + str(i)] = pca2_results_test[:, i-1]\n \n train['ica_' + str(i)] = ica2_results_train[:,i-1]\n test['ica_' + str(i)] = ica2_results_test[:, i-1]\n \ny_train = train[\"y\"]\ny_mean = np.mean(y_train)",
"_____no_output_____"
],
[
" ()# mmm, xgboost, loved by everyone ^-^\nimport xgboost as xgb\n\n# prepare dict of params for xgboost to run with\nxgb_params = {\n 'n_trees': 500, \n 'eta': 0.005,\n 'max_depth': 4,\n 'subsample': 0.95,\n 'objective': 'reg:linear',\n 'eval_metric': 'rmse',\n 'base_score': y_mean, # base prediction = mean(target)\n 'silent': 1\n}\n\n# form DMatrices for Xgboost training\ndtrain = xgb.DMatrix(train.drop('y', axis=1), y_train)\ndtest = xgb.DMatrix(test)\n\n# xgboost, cross-validation\ncv_result = xgb.cv(xgb_params, \n dtrain, \n num_boost_round=650, # increase to have better results (~700)\n early_stopping_rounds=50,\n verbose_eval=50, \n show_stdv=True\n )\n\nnum_boost_rounds = len(cv_result)\nprint(num_boost_rounds)\n\n# train model\nxgb_model = xgb.train(dict(xgb_params, silent=0), dtrain, num_boost_round=num_boost_rounds)",
"[0]\ttrain-rmse:12.6399+0.154977\ttest-rmse:12.6383+0.309394\n[50]\ttrain-rmse:11.0903+0.173162\ttest-rmse:11.1515+0.321941\n[100]\ttrain-rmse:10.0181+0.193437\ttest-rmse:10.1468+0.339326\n[150]\ttrain-rmse:9.28968+0.212088\ttest-rmse:9.48773+0.359611\n[200]\ttrain-rmse:8.80275+0.226347\ttest-rmse:9.06668+0.37999\n[250]\ttrain-rmse:8.47847+0.236952\ttest-rmse:8.80237+0.39653\n[300]\ttrain-rmse:8.26052+0.243699\ttest-rmse:8.63721+0.408339\n[350]\ttrain-rmse:8.09265+0.240265\ttest-rmse:8.53675+0.417034\n[400]\ttrain-rmse:7.94872+0.223081\ttest-rmse:8.47722+0.423239\n[450]\ttrain-rmse:7.83237+0.208945\ttest-rmse:8.44253+0.428947\n[500]\ttrain-rmse:7.72702+0.191756\ttest-rmse:8.4231+0.431914\n[550]\ttrain-rmse:7.63446+0.174185\ttest-rmse:8.41282+0.433279\n[600]\ttrain-rmse:7.55366+0.156574\ttest-rmse:8.40588+0.433408\n[649]\ttrain-rmse:7.48377+0.14259\ttest-rmse:8.40391+0.43269\n650\n"
],
[
"# check f2-score (to get higher score - increase num_boost_round in previous cell)\nfrom sklearn.metrics import r2_score\n\n# now fixed, correct calculation\nprint(r2_score(dtrain.get_label(), xgb_model.predict(dtrain)))",
"0.63188978762\n"
],
[
"xgb_model.save_model('xgb_{}.model'.format(num_boost_rounds))",
"_____no_output_____"
],
[
"# make predictions and save results\ny_pred = xgb_model.predict(dtest)\noutput = pd.DataFrame({'id': test['ID'].astype(np.int32), 'y': y_pred})\noutput.to_csv('xgboost-boost_rounds{}-pca-ica.csv'.format(num_boost_rounds), index=False)",
"_____no_output_____"
]
],
[
[
"# Trying to ensemble the results of the two",
"_____no_output_____"
]
],
[
[
"xgb_train_preds = xgb_model.predict(dtrain)\nkeras_train_preds = estimator.predict(X)",
"_____no_output_____"
],
[
"xgb_test_preds = xgb_model.predict(dtest)\nkeras_test_preds = estimator.predict(X_test.values).ravel()",
"_____no_output_____"
],
[
"cum_train_preds = np.column_stack((keras_train_preds, xgb_train_preds))",
"_____no_output_____"
],
[
"cum_test_preds = np.column_stack((keras_test_preds, xgb_test_preds))",
"_____no_output_____"
],
[
"# prepare dict of params for xgboost to run with\nxgb_params_new = {\n 'n_trees': 500, \n 'eta': 0.005,\n 'max_depth': 4,\n 'subsample': 0.95,\n 'objective': 'reg:linear',\n 'eval_metric': 'rmse',\n 'base_score': y_mean, # base prediction = mean(target)\n 'silent': 1\n}\n\n# form DMatrices for Xgboost training\ndtrain_new = xgb.DMatrix(cum_train_preds, y_train)\ndtest_new = xgb.DMatrix(cum_test_preds)\n\n# xgboost, cross-validation\ncv_result_new = xgb.cv(xgb_params_new, \n dtrain, \n num_boost_round=500, # increase to have better results (~700)\n early_stopping_rounds=50,\n verbose_eval=50, \n show_stdv=True\n )\n\nnum_boost_rounds_new = len(cv_result_new)\nprint(num_boost_rounds_new)\n\n# train model\nxgb_model_new = xgb.train(dict(xgb_params_new, silent=0), dtrain_new, num_boost_round=num_boost_rounds_new)",
"[0]\ttrain-rmse:12.6399+0.154977\ttest-rmse:12.6383+0.309394\n[50]\ttrain-rmse:11.0903+0.173162\ttest-rmse:11.1515+0.321941\n[100]\ttrain-rmse:10.0181+0.193437\ttest-rmse:10.1468+0.339326\n[150]\ttrain-rmse:9.28968+0.212088\ttest-rmse:9.48773+0.359611\n[200]\ttrain-rmse:8.80275+0.226347\ttest-rmse:9.06668+0.37999\n[250]\ttrain-rmse:8.47847+0.236952\ttest-rmse:8.80237+0.39653\n[300]\ttrain-rmse:8.26052+0.243699\ttest-rmse:8.63721+0.408339\n[350]\ttrain-rmse:8.09265+0.240265\ttest-rmse:8.53675+0.417034\n[400]\ttrain-rmse:7.94872+0.223081\ttest-rmse:8.47722+0.423239\n[450]\ttrain-rmse:7.83237+0.208945\ttest-rmse:8.44253+0.428947\n[500]\ttrain-rmse:7.72702+0.191756\ttest-rmse:8.4231+0.431914\n[550]\ttrain-rmse:7.63446+0.174185\ttest-rmse:8.41282+0.433279\n[600]\ttrain-rmse:7.55366+0.156574\ttest-rmse:8.40588+0.433408\n[650]\ttrain-rmse:7.48207+0.14231\ttest-rmse:8.40394+0.4326\n[699]\ttrain-rmse:7.41426+0.127975\ttest-rmse:8.40462+0.430224\n700\n"
],
[
"# now fixed, correct calculation\nprint(r2_score(dtrain_new.get_label(), xgb_model_new.predict(dtrain_new)))",
"0.714911383705\n"
],
[
"xgb_model_new.save_model('xgb_{}_ensemble.model'.format(num_boost_rounds_new))",
"_____no_output_____"
],
[
"# make predictions and save results\ny_pred_new = xgb_model_new.predict(dtest_new)\noutput_new = pd.DataFrame({'id': test['ID'].astype(np.int32), 'y': y_pred_new})\noutput_new.to_csv('xgboost-boost_rounds{}-pca-ica_ensemble.csv'.format(num_boost_rounds_new), index=False)",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a65cfb51e9bb7ae0f09b0d5304f98e8356689bd
| 320,642 |
ipynb
|
Jupyter Notebook
|
Submission/Final/Notebooks/Part1-Lotka-Volterra-Model.ipynb
|
hillegass/complex-sim
|
acfd3849c19fa3361788a6e8f96ce76ca64be613
|
[
"MIT"
] | 2 |
2020-03-05T20:57:14.000Z
|
2020-03-17T00:45:54.000Z
|
Submission/Final/Notebooks/Part1-Lotka-Volterra-Model.ipynb
|
hillegass/complex-sim
|
acfd3849c19fa3361788a6e8f96ce76ca64be613
|
[
"MIT"
] | null | null | null |
Submission/Final/Notebooks/Part1-Lotka-Volterra-Model.ipynb
|
hillegass/complex-sim
|
acfd3849c19fa3361788a6e8f96ce76ca64be613
|
[
"MIT"
] | null | null | null | 247.982985 | 160,580 | 0.912856 |
[
[
[
"# Simulating a Predator and Prey Relationship\n\nWithout a predator, rabbits will reproduce until they reach the carrying capacity of the land. When coyotes show up, they will eat the rabbits and reproduce until they can't find enough rabbits. We will explore the fluctuations in the two populations over time.",
"_____no_output_____"
],
[
"# Using Lotka-Volterra Model",
"_____no_output_____"
],
[
"## Part 1: Rabbits without predators\n\nAccording to [Mother Earth News](https://www.motherearthnews.com/homesteading-and-livestock/rabbits-on-pasture-intensive-grazing-with-bunnies-zbcz1504), a rabbit eats six square feet of pasture per day. Let's assume that our rabbits live in a five acre clearing in a forest: 217,800 square feet/6 square feet = 36,300 rabbit-days worth of food. For simplicity, let's assume the grass grows back in two months. Thus, the carrying capacity of five acres is 36,300/60 = 605 rabbits.\n\nFemale rabbits reproduce about six to seven times per year. They have six to ten children in a litter. According to [Wikipedia](https://en.wikipedia.org/wiki/Rabbit), a wild rabbit reaches sexual maturity when it is about six months old and typically lives one to two years. For simplicity, let's assume that in the presence of unlimited food, a rabbit lives forever, is immediately sexually mature, and has 1.5 children every month.\n\nFor our purposes, then, let $x_t$ be the number of rabbits in our five acre clearing on month $t$.\n$$\n\\begin{equation*}\n R_t = R_{t-1} + 1.5\\frac{605 - R_{t-1}}{605} R_{t-1}\n\\end{equation*}\n$$\n\nThe formula could be put into general form\n$$\n\\begin{equation*}\n R_t = R_{t-1} + growth_{R} \\times \\big( \\frac{capacity_{R} - R_{t-1}}{capacity_{R}} \\big) R_{t-1}\n\\end{equation*}\n$$\n\nBy doing this, we allow users to interact with growth rate and the capacity value visualize different interaction \n",
"_____no_output_____"
]
],
[
[
"from __future__ import print_function\nfrom ipywidgets import interact, interactive, fixed, interact_manual\nfrom IPython.display import display, clear_output\nimport ipywidgets as widgets\nimport matplotlib.pyplot as plt\nimport numpy as np",
"_____no_output_____"
],
[
"%matplotlib inline\nstyle = {'description_width': 'initial'}\ncapacity_R = widgets.FloatText(description=\"Capacity\", value=605)\ngrowth_rate_R = widgets.FloatText(description=\"Growth rate\", value=1.5)\ninitial_R = widgets.FloatText(description=\"Initial population\",style=style, value=1)\nbutton_R = widgets.Button(description=\"Plot Graph\")\ndisplay(initial_R, capacity_R, growth_rate_R, button_R)\n\ndef plot_graph_r(b):\n print(\"helo\")\n clear_output()\n display(initial_R, capacity_R, growth_rate_R, button_R)\n fig = plt.figure()\n ax = fig.add_subplot(111)\n t = np.arange(0, 20, 1)\n s = np.zeros(t.shape)\n R = initial_R.value\n for i in range(t.shape[0]):\n s[i] = R\n R = R + growth_rate_R.value * (capacity_R.value - R)/(capacity_R.value) * R\n if R < 0.0:\n R = 0.0\n \n ax.plot(t, s)\n ax.set(xlabel='time (months)', ylabel='number of rabbits',\n title='Rabbits Without Predators')\n ax.grid()\n\nbutton_R.on_click(plot_graph_r)",
"_____no_output_____"
]
],
[
[
"**Exercise 1** (1 point). Complete the following functions, find the number of rabbits at time 5, given $x_0$ = 10, population capcity =100, and growth rate = 0.8",
"_____no_output_____"
]
],
[
[
"R_i = 10\nfor i in range(5):\n R_i = int(R_i + 0.8 * (100 - R_i)/(100) * R_i)\n \nprint(f'There are {R_i} rabbits in the system at time 5')\n",
"There are 81 rabbits in the system at time 5\n"
]
],
[
[
"## Tweaking the Growth Function\nThe growth is regulated by this part of the formula:\n$$\n\\begin{equation*}\n \\frac{capacity_{R} - R_{t-1}}{capacity_{R}}\n\\end{equation*}\n$$\nThat is, this fraction (and thus growth) goes to zero when the land is at capacity. As the number of rabbits goes to zero, this fraction goes to 1.0, so growth is at its highest speed. We could substitute in another function that has the same values at zero and at capacity, but has a different shape. For example, \n$$\n\\begin{equation*}\n \\left( \\frac{capacity_{R} - R_{t-1}}{capacity_{R}} \\right)^{\\beta}\n\\end{equation*}\n$$\nwhere $\\beta$ is a positive number. For example, if $\\beta$ is 1.3, it indicates that the rabbits can sense that food supplies are dwindling and pre-emptively slow their reproduction.",
"_____no_output_____"
]
],
[
[
"#### %matplotlib inline\nimport math\nstyle = {'description_width': 'initial'}\ncapacity_R_2 = widgets.FloatText(description=\"Capacity\", value=605)\ngrowth_rate_R_2 = widgets.FloatText(description=\"Growth rate\", value=1.5)\ninitial_R_2 = widgets.FloatText(description=\"Initial population\",style=style, value=1)\nshaping_R_2 = widgets.FloatText(description=\"Shaping\", value=1.3)\nbutton_R_2 = widgets.Button(description=\"Plot Graph\")\ndisplay(initial_R_2, capacity_R_2, growth_rate_R_2, shaping_R_2, button_R_2)\n\ndef plot_graph_r(b):\n clear_output()\n display(initial_R_2, capacity_R_2, growth_rate_R_2, shaping_R_2, button_R_2) \n fig = plt.figure()\n ax = fig.add_subplot(111)\n t = np.arange(0, 20, 1)\n s = np.zeros(t.shape)\n R = initial_R_2.value\n beta = float(shaping_R_2.value)\n for i in range(t.shape[0]):\n s[i] = R\n reserve_ratio = (capacity_R_2.value - R)/capacity_R_2.value\n if reserve_ratio > 0.0:\n R = R + R * growth_rate_R_2.value * reserve_ratio**beta\n else:\n R = R - R * growth_rate_R_2.value * (-1.0 * reserve_ratio)**beta\n if R < 0.0:\n R = 0\n \n ax.plot(t, s)\n ax.set(xlabel='time (months)', ylabel='number of rabbits',\n title='Rabbits Without Predators (Shaped)')\n ax.grid()\n\nbutton_R_2.on_click(plot_graph_r)",
"_____no_output_____"
]
],
[
[
"**Exercise 2** (1 point). Repeat Exercise 1, with $\\beta$ = 1.5 Complete the following functions, find the number of rabbits at time 5. Should we expect to see more rabbits or less?",
"_____no_output_____"
]
],
[
[
"R_i = 10\nb=1.5\nfor i in range(5):\n R_i = int(R_i + 0.8 * ((100 - R_i)/(100))**b * R_i)\n \nprint(f'There are {R_i} rabbits in the system at time 5, less rabbits compare to exercise 1, where beta = 1')",
"There are 64 rabbits in the system at time 5, less rabbits compare to exercise 1, where beta = 1\n"
]
],
[
[
"## Part 2: Coyotes without Prey\nAccording to [Huntwise](https://www.besthuntingtimes.com/blog/2020/2/3/why-you-should-coyote-hunt-how-to-get-started), coyotes need to consume about 2-3 pounds of food per day. Their diet is 90 percent mammalian. The perfect adult cottontail rabbits weigh 2.6 pounds on average. Thus, we assume the coyote eats one rabbit per day. \n\nFor coyotes, the breeding season is in February and March. According to [Wikipedia](https://en.wikipedia.org/wiki/Coyote#Social_and_reproductive_behaviors), females have a gestation period of 63 days, with an average litter size of 6, though the number fluctuates depending on coyote population density and the abundance of food. By fall, the pups are old enough to hunt for themselves.\n\nIn the absence of rabbits, the number of coyotes will drop, as their food supply is scarce.\nThe formula could be put into general form:\n\n$$\n\\begin{align*}\n C_t & \\sim (1 - death_{C}) \\times C_{t-1}\\\\\n &= C_{t-1} - death_{C} \\times C_{t-1}\n\\end{align*}\n$$\n\n",
"_____no_output_____"
]
],
[
[
"%matplotlib inline\nstyle = {'description_width': 'initial'}\ninitial_C=widgets.FloatText(description=\"Initial Population\",style=style,value=200.0)\ndeclining_rate_C=widgets.FloatText(description=\"Death rate\",value=0.5)\nbutton_C=widgets.Button(description=\"Plot Graph\")\ndisplay(initial_C, declining_rate_C, button_C)\n\ndef plot_graph_c(b):\n clear_output()\n display(initial_C, declining_rate_C, button_C)\n fig = plt.figure()\n ax = fig.add_subplot(111)\n t1 = np.arange(0, 20, 1)\n s1 = np.zeros(t1.shape)\n C = initial_C.value\n for i in range(t1.shape[0]):\n s1[i] = C\n C = (1 - declining_rate_C.value)*C\n \n ax.plot(t1, s1)\n ax.set(xlabel='time (months)', ylabel='number of coyotes',\n title='Coyotes Without Prey')\n ax.grid()\n\nbutton_C.on_click(plot_graph_c)\n",
"_____no_output_____"
]
],
[
[
"**Exercise 3** (1 point). Assume the system has 100 coyotes at time 0, the death rate is 0.5 if there are no prey. At what point in time, coyotes become extinct.",
"_____no_output_____"
]
],
[
[
"ti = 0\ncoyotes_init = 100\nc_i = coyotes_init\nd_r = 0.5\nwhile c_i > 10:\n c_i= int((1 - d_r)*c_i)\n ti =ti + 1\nprint(f'At time t={ti}, the coyotes become extinct') ",
"At time t=4, the coyotes become extinct\n"
]
],
[
[
"## Part 3: Interaction Between Coyotes and Rabbit\nWith the simple interaction from the first two parts, now we can combine both interaction and come out with simple interaction.\n$$\n\\begin{align*}\n R_t &= R_{t-1} + growth_{R} \\times \\big( \\frac{capacity_{R} - R_{t-1}}{capacity_{R}} \\big) R_{t-1} - death_{R}(C_{t-1})\\times R_{t-1}\\\\\\\\\n C_t &= C_{t-1} - death_{C} \\times C_{t-1} + growth_{C}(R_{t-1}) \\times C_{t-1}\n\\end{align*}\n$$\n\nIn equations above, death rate of rabbit is a function parameterized by the amount of coyote. Similarly, the growth rate of coyotes is a function parameterized by the amount of the rabbit.\n\nThe death rate of the rabbit should be $0$ if there are no coyotes, while it should approach $1$ if there are many coyotes. One of the formula fulfilling this characteristics is hyperbolic function.\n\n$$\n\\begin{equation}\ndeath_R(C) = 1 - \\frac{1}{xC + 1}\n\\end{equation}\n$$\n\nwhere $x$ determines how quickly $death_R$ increases as the number of coyotes ($C$) increases. Similarly, the growth rate of the coyotes should be $0$ if there are no rabbits, while it should approach infinity if there are many rabbits. One of the formula fulfilling this characteristics is a linear function.\n\n$$\n\\begin{equation}\ngrowth_C(R) = yC\n\\end{equation}\n$$\n\nwhere $y$ determines how quickly $growth_C$ increases as number of rabbit ($R$) increases.\n\nPutting all together, the final equtions are\n\n$$\n\\begin{align*}\n R_t &= R_{t-1} + growth_{R} \\times \\big( \\frac{capacity_{R} - R_{t-1}}{capacity_{R}} \\big) R_{t-1} - \\big( 1 - \\frac{1}{xC_{t-1} + 1} \\big)\\times R_{t-1}\\\\\\\\\n C_t &= C_{t-1} - death_{C} \\times C_{t-1} + yR_{t-1}C_{t-1}\n\\end{align*}\n$$\n\n",
"_____no_output_____"
],
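[
"# Editor-added sketch (not part of the original practical): one hand-worked step of the\n# combined model above, using the same starting values as the defaults in the solution\n# cell below (R0 = C0 = 1, rabbit capacity = 5, all rates and interaction ratios = 1).\nR_prev, C_prev = 1.0, 1.0\ngrowth_R, capacity_R, death_C, x_ratio, y_ratio = 1.0, 5.0, 1.0, 1.0, 1.0\n\n# Rabbits: logistic growth term minus the coyote-driven death term (1 - 1/(x*C + 1))\nR_next = R_prev + growth_R * (capacity_R - R_prev) / capacity_R * R_prev - (1 - 1 / (x_ratio * C_prev + 1)) * R_prev\n# Coyotes: natural decline plus growth proportional to the available rabbits\nC_next = C_prev - death_C * C_prev + y_ratio * R_prev * C_prev\n\nprint(R_next, C_next)  # 1 + 0.8 - 0.5 = 1.3 rabbits and 1 - 1 + 1 = 1.0 coyotes",
"_____no_output_____"
],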
[
"**Exercise 4** (3 point). The model we have created above is a variation of the Lotka-Volterra model, which describes various forms of predator-prey interactions. Complete the following functions, which should generate the state variables plotted over time. Blue = prey, Orange = predators. ",
"_____no_output_____"
]
],
[
[
"%matplotlib inline\ninitial_rabbit = widgets.FloatText(description=\"Initial Rabbit\",style=style, value=1)\ninitial_coyote = widgets.FloatText(description=\"Initial Coyote\",style=style, value=1)\ncapacity = widgets.FloatText(description=\"Capacity rabbits\", style=style,value=5)\ngrowth_rate = widgets.FloatText(description=\"Growth rate rabbits\", style=style,value=1)\ndeath_rate = widgets.FloatText(description=\"Death rate coyotes\", style=style,value=1)\nx = widgets.FloatText(description=\"Death rate ratio due to coyote\",style=style, value=1)\ny = widgets.FloatText(description=\"Growth rate ratio due to rabbit\",style=style, value=1)\nbutton = widgets.Button(description=\"Plot Graph\")\ndisplay(initial_rabbit, initial_coyote, capacity, growth_rate, death_rate, x, y, button)\ndef plot_graph(b):\n clear_output()\n display(initial_rabbit, initial_coyote, capacity, growth_rate, death_rate, x, y, button)\n fig = plt.figure()\n ax = fig.add_subplot(111)\n t = np.arange(0, 20, 0.5)\n s = np.zeros(t.shape)\n p = np.zeros(t.shape)\n R = initial_rabbit.value\n C = initial_coyote.value\n for i in range(t.shape[0]):\n s[i] = R\n p[i] = C\n R = R + growth_rate.value * (capacity.value - R)/(capacity.value) * R - (1 - 1/(x.value*C + 1))*R\n C = C - death_rate.value * C + y.value*s[i]*C\n \n ax.plot(t, s, label=\"rabit\")\n ax.plot(t, p, label=\"coyote\")\n ax.set(xlabel='time (months)', ylabel='population size',\n title='Coyotes-Rabbit (Predator-Prey) Relationship')\n ax.grid()\n ax.legend()\n\nbutton.on_click(plot_graph)",
"_____no_output_____"
]
],
[
[
"The system shows an oscillatory behavior. Let's try to verify the nonlinear oscillation in phase space visualization.\n",
"_____no_output_____"
],
[
"## Part 4: Trajectories and Direction Fields for a system of equations \n\nTo further demonstrate the predator numbers rise and fall cyclically with their preferred prey, we will be using the Lotka-Volterra equations, which is based on differential equations. The Lotka-Volterra Prey-Predator model involves two equations, one describes the changes in number of preys and the second one decribes the changes in number of predators. The dynamics of the interaction between a rabbit population $R_t$ and a coyotes $C_t$ is described by the following differential equations:\n$$\n\\begin{align*}\n\\frac{dR}{dt} = aR_t - bR_tC_t\n\\end{align*}\n$$\n\n$$\n\\begin{align*}\n\\frac{dC}{dt} = bdR_tC_t - cC_t\n\\end{align*}\n$$\n\nwith the following notations:\n\nR$_t$: number of preys(rabbits)\n\nC$_t$: number of predators(coyotes)\n\na: natural growing rate of rabbits, when there is no coyotes\n\nb: natural dying rate of rabbits, which is killed by coyotes per unit of time\n\nc: natural dying rate of coyotes, when ther is not rabbits\n\nd: natural growing rate of coyotes with which consumed prey is converted to predator\n\nWe start from defining the system of ordinary differential equations, and then find the equilibrium points for our system. Equilibrium occurs when the frowth rate is 0, and we can see that we have two equilibrium points in our example, the first one happens when theres no preys or predators, which represents the extinction of both species, the second equilibrium happens when $R_t=\\frac{c}{b d}$ $C_t=\\frac{a}{b}$. Move on, we will use the scipy to help us integrate the differential equations, and generate the plot of evolution for both species:\n",
"_____no_output_____"
],
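[
"# Editor-added sketch (assumes sympy is available; not part of the original practical):\n# derive the equilibrium points symbolically by setting dR/dt = 0 and dC/dt = 0 and\n# solving for R and C.\nimport sympy as sp\n\na, b, c, d, R, C = sp.symbols('a b c d R C')\ndR_dt = a*R - b*R*C\ndC_dt = d*b*R*C - c*C\n\n# Expect the trivial equilibrium {R: 0, C: 0} and the coexistence point {R: c/(b*d), C: a/b}\nprint(sp.solve([dR_dt, dC_dt], [R, C], dict=True))",
"_____no_output_____"
],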
[
"**Exercise 5** (3 point). As we can tell from the simulation results of predator-prey model, the system shows an oscillatory behavior. Find the equilibrium points of the system and generate the phase space visualization to demonstrate the oscillation seen previously is nonlinear with distorted orbits.",
"_____no_output_____"
]
],
[
[
"from scipy import integrate\n\n#using the same input number from the previous example\ninput_a = widgets.FloatText(description=\"a\",style=style, value=1)\ninput_b = widgets.FloatText(description=\"b\",style=style, value=1)\ninput_c = widgets.FloatText(description=\"c\",style=style, value=1)\ninput_d = widgets.FloatText(description=\"d\",style=style, value=1)\n# Define the system of ODEs\n# P[0] is prey, P[1] is predator\ndef dP_dt(P,t=0):\n return np.array([a*P[0]-b*P[0]*P[1], d*b*P[0]*P[1]-c*P[1]])\n\nbutton_draw_trajectories = widgets.Button(description=\"Plot Graph\")\ndisplay(input_a, input_b, input_c, input_d, button_draw_trajectories)\n\ndef plot_trajectories(graph):\n global a, b, c, d, eq1, eq2\n clear_output()\n display(input_a, input_b, input_c, input_d, button_draw_trajectories)\n a = input_a.value\n b = input_b.value\n c = input_c.value\n d = input_d.value\n # Define the Equilibrium points\n eq1 = np.array([0. , 0.])\n eq2 = np.array([c/(d*b),a/b])\n values = np.linspace(0.1, 3, 10)\n # Colors for each trajectory\n vcolors = plt.cm.autumn_r(np.linspace(0.1, 1., len(values)))\n f = plt.figure(figsize=(10,6))\n t = np.linspace(0, 150, 1000)\n for v, col in zip(values, vcolors):\n # Starting point \n P0 = v*eq2\n P = integrate.odeint(dP_dt, P0, t)\n plt.plot(P[:,0], P[:,1],\n lw= 1.5*v, # Different line width for different trajectories\n color=col, label='P0=(%.f, %.f)' % ( P0[0], P0[1]) )\n ymax = plt.ylim(bottom=0)[1]\n xmax = plt.xlim(left=0)[1]\n nb_points = 20\n x = np.linspace(0, xmax, nb_points)\n y = np.linspace(0, ymax, nb_points)\n X1,Y1 = np.meshgrid(x, y) \n DX1, DY1 = dP_dt([X1, Y1]) \n M = (np.hypot(DX1, DY1)) \n M[M == 0] = 1. \n DX1 /= M \n DY1 /= M\n plt.title('Trajectories and direction fields')\n Q = plt.quiver(X1, Y1, DX1, DY1, M, pivot='mid', cmap=plt.cm.plasma)\n plt.xlabel('Number of rabbits')\n plt.ylabel('Number of coyotes')\n plt.legend()\n plt.grid()\n plt.xlim(0, xmax)\n plt.ylim(0, ymax)\n print(f\"\\n\\nThe equilibrium pointsof the system are:\", list(eq1), list(eq2))\n plt.show() \n \nbutton_draw_trajectories.on_click(plot_trajectories)\n",
"_____no_output_____"
]
],
[
[
"The model here is described in continuous differential equations, thus there is no jump or intersections between the trajectories.",
"_____no_output_____"
],
[
"\n\n## Part 5: Multiple Predators and Preys Relationship\n\nThe previous relationship could be extended to multiple predators and preys relationship",
"_____no_output_____"
],
[
"**Exercise 6** (3 point). Develop a discrete-time mathematical model of four species, and each two of them competing for the same resource, and simulate its behavior. Plot the simulation results.",
"_____no_output_____"
]
],
[
[
"%matplotlib inline\ninitial_rabbit2 = widgets.FloatText(description=\"Initial Rabbit\", style=style,value=2)\ninitial_coyote2 = widgets.FloatText(description=\"Initial Coyote\",style=style, value=2)\ninitial_deer2 = widgets.FloatText(description=\"Initial Deer\", style=style,value=1)\ninitial_wolf2 = widgets.FloatText(description=\"Initial Wolf\", style=style,value=1)\npopulation_capacity = widgets.FloatText(description=\"capacity\",style=style, value=10)\npopulation_capacity_rabbit = widgets.FloatText(description=\"capacity rabbit\",style=style, value=3)\ngrowth_rate_rabbit = widgets.FloatText(description=\"growth rate rabbit\",style=style, value=1)\ndeath_rate_coyote = widgets.FloatText(description=\"death rate coyote\",style=style, value=1)\ngrowth_rate_deer = widgets.FloatText(description=\"growth rate deer\",style=style, value=1)\ndeath_rate_wolf = widgets.FloatText(description=\"death rate wolf\",style=style, value=1)\nx1 = widgets.FloatText(description=\"death rate ratio due to coyote\",style=style, value=1)\ny1 = widgets.FloatText(description=\"growth rate ratio due to rabbit\", style=style,value=1)\nx2 = widgets.FloatText(description=\"death rate ratio due to wolf\",style=style, value=1)\ny2 = widgets.FloatText(description=\"growth rate ratio due to deer\", style=style,value=1)\nplot2 = widgets.Button(description=\"Plot Graph\")\ndisplay(initial_rabbit2, initial_coyote2,initial_deer2, initial_wolf2, population_capacity, \n population_capacity_rabbit, growth_rate_rabbit, growth_rate_deer, death_rate_coyote,death_rate_wolf,\n x1, y1,x2, y2, plot2)\ndef plot_graph(b):\n clear_output()\n display(initial_rabbit2, initial_coyote2,initial_deer2, initial_wolf2, population_capacity, \n population_capacity_rabbit, growth_rate_rabbit, growth_rate_deer, death_rate_coyote,death_rate_wolf,\n x1, y1,x2, y2, plot2)\n fig = plt.figure()\n ax = fig.add_subplot(111)\n t_m = np.arange(0, 20, 0.5)\n r_m = np.zeros(t_m.shape)\n c_m = np.zeros(t_m.shape)\n d_m = np.zeros(t_m.shape)\n w_m = np.zeros(t_m.shape)\n R_m = initial_rabbit2.value\n C_m = initial_coyote2.value\n D_m = initial_deer2.value\n W_m = initial_wolf2.value\n population_capacity_deer = population_capacity.value - population_capacity_rabbit.value\n for i in range(t_m.shape[0]):\n r_m[i] = R_m\n c_m[i] = C_m\n d_m[i] = D_m\n w_m[i] = W_m\n \n R_m = R_m + growth_rate_rabbit.value * (population_capacity_rabbit.value - R_m)\\\n /(population_capacity_rabbit.value) * R_m - (1 - 1/(x1.value*C_m + 1))*R_m - (1 - 1/(x2.value*W_m + 1))*R_m \n D_m = D_m + growth_rate_deer.value * (population_capacity_deer - D_m) \\\n /(population_capacity_deer) * D_m - (1 - 1/(x1.value*C_m + 1))*D_m - (1 - 1/(x2.value*W_m + 1))*D_m\n \n C_m = C_m - death_rate_coyote.value * C_m + y1.value*r_m[i]*C_m + y2.value*d_m[i]*C_m\n W_m = W_m - death_rate_wolf.value * W_m + y1.value*r_m[i]*W_m + y2.value*d_m[i]*W_m\n \n ax.plot(t_m, r_m, label=\"rabit\")\n ax.plot(t_m, c_m, label=\"coyote\")\n ax.plot(t_m, d_m, label=\"deer\")\n ax.plot(t_m, w_m, label=\"wolf\")\n ax.set(xlabel='time (months)', ylabel='population',\n title='Multiple Predator Prey Relationship')\n ax.grid()\n ax.legend()\n\nplot2.on_click(plot_graph)",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
]
] |
4a65e54d4f2697403c08dcd48169571300c506d8
| 1,851 |
ipynb
|
Jupyter Notebook
|
Assignment_2_Day_3.ipynb
|
Amitkumarpanda192/LetsUpgrade-Python
|
fa8afd1f24a438de0231c1ca8bad90a4755545c1
|
[
"Apache-2.0"
] | null | null | null |
Assignment_2_Day_3.ipynb
|
Amitkumarpanda192/LetsUpgrade-Python
|
fa8afd1f24a438de0231c1ca8bad90a4755545c1
|
[
"Apache-2.0"
] | null | null | null |
Assignment_2_Day_3.ipynb
|
Amitkumarpanda192/LetsUpgrade-Python
|
fa8afd1f24a438de0231c1ca8bad90a4755545c1
|
[
"Apache-2.0"
] | null | null | null | 26.442857 | 253 | 0.475419 |
[
[
[
"<a href=\"https://colab.research.google.com/github/Amitkumarpanda192/LetsUpgrade-Python/blob/master/Assignment_2_Day_3.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>",
"_____no_output_____"
],
[
"**Prime number in between 1 to 200**",
"_____no_output_____"
]
],
[
[
"\nfor num in range(2,200):\n flag =0\n for i in range(2,num//2):\n if num % i == 0:\n break\n else:\n print(num,end =',')",
"2,3,4,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,"
]
]
] |
[
"markdown",
"code"
] |
[
[
"markdown",
"markdown"
],
[
"code"
]
] |
4a65e5b98973b836cbd6a6a4c6e795a5280fa1eb
| 799,537 |
ipynb
|
Jupyter Notebook
|
.ipynb_checkpoints/testing_costamesa-checkpoint.ipynb
|
saramhse/appraisal_app_v1
|
eb97fcb143301413a21520309ce38576157715ae
|
[
"MIT"
] | null | null | null |
.ipynb_checkpoints/testing_costamesa-checkpoint.ipynb
|
saramhse/appraisal_app_v1
|
eb97fcb143301413a21520309ce38576157715ae
|
[
"MIT"
] | null | null | null |
.ipynb_checkpoints/testing_costamesa-checkpoint.ipynb
|
saramhse/appraisal_app_v1
|
eb97fcb143301413a21520309ce38576157715ae
|
[
"MIT"
] | null | null | null | 71.681639 | 98,336 | 0.655546 |
[
[
[
"import csv\nimport pickle\nimport pandas as pd\nimport numpy as np\n\nimport requests\nimport json\n\nimport seaborn as sns\nimport matplotlib.pyplot as plt\n\nfrom scipy import stats\nimport sklearn.preprocessing\nfrom sklearn import preprocessing",
"_____no_output_____"
],
[
"data=pd.read_csv(\"temp1-costamesa.csv\")\ndata",
"_____no_output_____"
],
[
"data.columns",
"_____no_output_____"
],
[
"data=data.drop_duplicates(subset=['ADDRESS'], keep='first')\ndata",
"_____no_output_____"
],
[
"data=data.rename(columns={'URL (SEE http://www.redfin.com/buy-a-home/comparative-market-analysis FOR INFO ON PRICING)':'url',\n 'PROPERTY TYPE':'type',\n 'CITY':'city',\n 'ZIP':'zip',\n 'PRICE':'price',\n 'BEDS':'beds',\n 'BATHS':'baths',\n 'SQUARE FEET':'sqrft',\n 'LOT SIZE':'lot',\n 'YEAR BUILT':'built',\n 'DAYS ON MARKET':'dom',\n '$/SQUARE FEET':'$/sqrft',\n 'HOA/MONTH':'hoa',\n 'LATITUDE':'lat',\n 'LONGITUDE':'lon'})",
"_____no_output_____"
],
[
"data['full_address'] = data['ADDRESS'] + \", \" + data['city'] + \", \" + data['STATE']\ndata.head()",
"_____no_output_____"
],
[
"# api_key='AIzaSyAOjSf4Tk_StWcxTANG_2Sih0IN19W9cSI'\n# url=\"https://maps.googleapis.com/maps/api/geocode/json?address={}&key={}\"\n# url",
"_____no_output_____"
],
[
"# lat_list=[]\n# lon_list=[]",
"_____no_output_____"
],
[
"\n# for i in data.full_address:\n# response=requests.get(url.format(i,api_key)).json()\n# print(json.dumps(response, indent=4, sort_keys=True))\n# lat=response[\"results\"][0][\"geometry\"][\"location\"][\"lat\"]\n# lat_list.append(lat)\n# lon=response[\"results\"][0][\"geometry\"][\"location\"][\"lng\"]\n# lon_list.append(lon)\n\n",
"_____no_output_____"
],
[
"# len(lat_list)",
"_____no_output_____"
],
[
"# data['lat_backup']=pd.Series(lat_list)\n# data['lon_backup']=pd.Series(lon_list)\n# data",
"_____no_output_____"
],
[
"data=data[['type','city','zip','price','beds','baths','sqrft','lot','built',\n 'dom','$/sqrft','hoa','lat','lon']]\n",
"_____no_output_____"
],
[
"data.describe()",
"_____no_output_____"
],
[
"data.info()",
"<class 'pandas.core.frame.DataFrame'>\nInt64Index: 253 entries, 0 to 252\nData columns (total 14 columns):\ntype 253 non-null object\ncity 253 non-null object\nzip 253 non-null int64\nprice 253 non-null int64\nbeds 252 non-null float64\nbaths 252 non-null float64\nsqrft 252 non-null float64\nlot 180 non-null float64\nbuilt 233 non-null float64\ndom 233 non-null float64\n$/sqrft 252 non-null float64\nhoa 112 non-null float64\nlat 253 non-null float64\nlon 253 non-null float64\ndtypes: float64(10), int64(2), object(2)\nmemory usage: 29.6+ KB\n"
],
[
"data['type']=data['type'].replace('Single Family Residential','sfr')\ndata['type']=data['type'].replace('Condo/Co-op','condo')\ndata['type']=data['type'].replace('Townhouse','thr')\ndata['type']=data['type'].replace('Multi-Family (2-4 Unit)','mfr')\ndata['type']=data['type'].replace('Multi-Family (5+ Unit)','mfr')\n",
"_____no_output_____"
],
[
"data.isnull().sum()\n",
"_____no_output_____"
],
[
"data=data[data['built'].notnull()]\ndata.head()",
"_____no_output_____"
],
[
"print(data.isnull().sum())\nfrom numpy import nan\ndata[data['hoa'].isnull()]\n",
"type 0\ncity 0\nzip 0\nprice 0\nbeds 1\nbaths 1\nsqrft 1\nlot 53\nbuilt 0\ndom 0\n$/sqrft 1\nhoa 121\nlat 0\nlon 0\ndtype: int64\n"
],
[
"#pass 0 for hoa of NaN homes with yeaer before 2000\nmask=(data['hoa'].isnull()) & (data['built']<2000)\ndata['hoa']=data['hoa'].mask(mask,0)\n",
"_____no_output_____"
],
[
"data.info()",
"<class 'pandas.core.frame.DataFrame'>\nInt64Index: 233 entries, 0 to 247\nData columns (total 14 columns):\ntype 233 non-null object\ncity 233 non-null object\nzip 233 non-null int64\nprice 233 non-null int64\nbeds 232 non-null float64\nbaths 232 non-null float64\nsqrft 232 non-null float64\nlot 180 non-null float64\nbuilt 233 non-null float64\ndom 233 non-null float64\n$/sqrft 232 non-null float64\nhoa 201 non-null float64\nlat 233 non-null float64\nlon 233 non-null float64\ndtypes: float64(10), int64(2), object(2)\nmemory usage: 27.3+ KB\n"
],
[
"data=data.set_index('zip')\ndata['lot medians']=data.groupby('zip')['lot'].median()\ndata.head()",
"_____no_output_____"
],
[
"mask1=(data['lot'].isnull())\ndata['lot']=data['lot'].mask(mask1,data['lot medians'])\ndata.head()",
"_____no_output_____"
],
[
"del data['lot medians']",
"_____no_output_____"
],
[
"data=data.reset_index()\ndata.info()",
"<class 'pandas.core.frame.DataFrame'>\nRangeIndex: 233 entries, 0 to 232\nData columns (total 14 columns):\nzip 233 non-null int64\ntype 233 non-null object\ncity 233 non-null object\nprice 233 non-null int64\nbeds 232 non-null float64\nbaths 232 non-null float64\nsqrft 232 non-null float64\nlot 233 non-null float64\nbuilt 233 non-null float64\ndom 233 non-null float64\n$/sqrft 232 non-null float64\nhoa 201 non-null float64\nlat 233 non-null float64\nlon 233 non-null float64\ndtypes: float64(10), int64(2), object(2)\nmemory usage: 25.6+ KB\n"
],
[
"print(data.isnull().sum())\n\ndata[data['beds'].isnull()]\n",
"zip 0\ntype 0\ncity 0\nprice 0\nbeds 1\nbaths 1\nsqrft 1\nlot 0\nbuilt 0\ndom 0\n$/sqrft 1\nhoa 32\nlat 0\nlon 0\ndtype: int64\n"
],
[
"data = data.dropna(axis=0, subset=['beds'])\nprint(data.isnull().sum())\n",
"zip 0\ntype 0\ncity 0\nprice 0\nbeds 0\nbaths 0\nsqrft 1\nlot 0\nbuilt 0\ndom 0\n$/sqrft 1\nhoa 32\nlat 0\nlon 0\ndtype: int64\n"
],
[
"data = data.dropna(axis=0, subset=['sqrft'])\nprint(data.isnull().sum())\n",
"zip 0\ntype 0\ncity 0\nprice 0\nbeds 0\nbaths 0\nsqrft 0\nlot 0\nbuilt 0\ndom 0\n$/sqrft 0\nhoa 32\nlat 0\nlon 0\ndtype: int64\n"
],
[
"data.shape\n",
"_____no_output_____"
]
],
[
[
"# Multicollinearity check",
"_____no_output_____"
]
],
[
[
"correlations=data.corr()\nplt.subplots(figsize=(10,8))\nsns.heatmap(correlations,annot=True)\nfig=plt.figure()\nplt.show()\n\n\n# beds\n# baths\n# sqrft\n# lot\n# per_sqrft\n# zipcode\n# types\n#yr built\n#hoa\n\n\n#multi-collinearity: beds and sqrft/baths and sqrft/beds and baths",
"_____no_output_____"
],
[
"plt.subplots(figsize=(20,8))\nsns.distplot(data['price'],fit=stats.norm)\n\n(mu,sigma)=stats.norm.fit(data['price'])\nplt.legend(['Normal Distribution Params mu={} and sigma={}'.format(mu,sigma)],loc='best')\nplt.ylabel('frequency')\n\nfig=plt.figure()\nplt.show()",
"_____no_output_____"
],
[
"mini=data['built'].min()\nmaxi=data['built'].max()\nprint(mini,maxi)\n\ndecades_no=[]\nfor i in data.built:\n decades=(i-mini)/10\n# print(decades)\n decades_no.append(decades)\n \ndata['train_built']=pd.Series(decades_no)\n\ndata['train_built']=data['train_built'].round(0)\ndata.head()",
"1912.0 2018.0\n"
],
[
"data.tail()",
"_____no_output_____"
],
[
"data = data.dropna(axis=0, subset=['train_built'])\n",
"_____no_output_____"
],
[
"decades_no",
"_____no_output_____"
],
[
"len(decades_no)",
"_____no_output_____"
],
[
"len(data)",
"_____no_output_____"
]
],
[
[
"# Pickled Cleaned Irvine DF Pre-Inference",
"_____no_output_____"
]
],
[
[
"\n# data.to_pickle('costamesa_data.pkl')\ninfile=open('costamesa_data.pkl','rb')\ntrain=pickle.load(infile)\n\ntrain.head()",
"_____no_output_____"
],
[
"train[train['train_built'].isnull()]",
"_____no_output_____"
],
[
"train",
"_____no_output_____"
],
[
"len(train)",
"_____no_output_____"
],
[
"train.info()",
"<class 'pandas.core.frame.DataFrame'>\nInt64Index: 229 entries, 0 to 230\nData columns (total 15 columns):\nzip 229 non-null int64\ntype 229 non-null object\ncity 229 non-null object\nprice 229 non-null int64\nbeds 229 non-null float64\nbaths 229 non-null float64\nsqrft 229 non-null float64\nlot 229 non-null float64\nbuilt 229 non-null float64\ndom 229 non-null float64\n$/sqrft 229 non-null float64\nhoa 199 non-null float64\nlat 229 non-null float64\nlon 229 non-null float64\ntrain_built 229 non-null float64\ndtypes: float64(11), int64(2), object(2)\nmemory usage: 28.6+ KB\n"
],
[
"train['train_built'].unique()",
"_____no_output_____"
]
],
[
[
"# Flask Functions for Front End:\n## Sizer Assist for Pred DF + Min Built Return",
"_____no_output_____"
]
],
[
[
"def train_flask():\n infile=open('costamesa_data.pk1','rb')\n train=pickle.load(infile)\n \n cols=['zip','type','train_built','beds','baths','sqrft','lot','$/sqrft']\n x=train[cols]\n \n train['$/sqrft']=np.log1p(train['$/sqrft'])\n train['sqrft']=np.log1p(train['sqrft'])\n train['lot']=np.log1p(train['lot'])\n x=pd.get_dummies(x,columns=['zip','type','train_built'])\n\n return x\n\n",
"_____no_output_____"
],
[
"def min_built():\n infile=open('costamesa_data.pk1','rb')\n train=pickle.load(infile)\n \n #for integrating: load all pickle files\n #output is a list of minimums\n \n costamesa_mini=int(train['built'].min())\n \n return costamesa_mini\n\n# print(train['built'].min())\n# type(int(train['built'].min()))\n\n# def min_built():\n# infile=open('irvine_data.pk1','rb')\n# train=pickle.load(infile)\n# f=open('whatever')\n# train_tust=pickle.load(f)\n \n# #for integrating: load all pickle files\n# #output is a list of minimums\n \n# irvine_mini=train['built'].min()\n# tustin_mini=train_tustin['built'].min()\n \n# return [irvine_mini,tustin_mini]\nmin_b=min_built()\nmin_b",
"_____no_output_____"
]
],
[
[
"# Inference Tests",
"_____no_output_____"
]
],
[
[
"# mini=train['built'].min()\n# maxi=train['built'].max()\n# print(mini,maxi)\n\n# decades_no=[]\n# for i in train.built:\n# decades=(i-mini)/10\n# # print(decades)\n# decades_no.append(decades)\n \n# train['train_built']=pd.Series(decades_no)\n\n# train['train_built']=train['train_built'].round(0)\n# train.head()\n",
"_____no_output_____"
],
[
"anova_data=train[['price','train_built']]\n\n# anova_data['train_built']=anova_data['train_built'].round(0)\n# bin_series=anova_data['train_built'].value_counts()\n\n##bin without series:\nbins=pd.unique(anova_data.train_built.values)\nf_test_data={grp:anova_data['price'][anova_data.train_built==grp] for grp in bins}\nprint(bins)\n\nfrom scipy import stats\n\nF, p=stats.f_oneway(f_test_data[5.],f_test_data[10.],f_test_data[9.],f_test_data[11.],f_test_data[7.],\n f_test_data[6.],f_test_data[8.],f_test_data[3.],f_test_data[4.],f_test_data[0.])\n\n# array([ 5., 10., 9., 11., 7., 6., 8., 3., 4., 0.])\n\nprint(F,p)\n\n\nk=len(pd.unique(anova_data.train_built.values))\nN=len(anova_data.values)\nn=anova_data['train_built'].value_counts()\n\n#F-static: btw/within variability\n\nDFbetween = k - 1\nDFwithin = N - k\nDFtotal = N - 1\nprint(f\"degrees of freedom between: {DFbetween}\")\nprint(f\"degrees of freedom within: {DFwithin}\")\nprint(f\"degrees of freedom total: {DFtotal}\")\n\n\n#reject null, not all group means are equal, variance exists, include year built in ML\n\n",
"[ 5. 10. 9. 11. 7. 6. 8. 3. 4. 0.]\n5.474131382599535 8.718690054616165e-07\ndegrees of freedom between: 9\ndegrees of freedom within: 219\ndegrees of freedom total: 228\n"
],
[
"import statsmodels.api as sm\nfrom sklearn.linear_model import LinearRegression\nfrom sklearn.metrics import r2_score, mean_squared_error, accuracy_score\n\n##non-zero HOA data df prep for reg. analysis, p-value, 95% CI\nhoa_f_prep=train[train['hoa'].notnull()]\n# hoa_f_prep.info()\ndep_var=hoa_f_prep['price']\nindep_var=hoa_f_prep['hoa']\n\nindep_var=indep_var.values.reshape(-1,1)\n\n# define the model\nmodel = LinearRegression()\n\n# fit the model to training data\nmodel.fit(indep_var, dep_var)\n\n#run p-test\nparams = np.append(model.intercept_,model.coef_)\npredictions = model.predict(indep_var)\n\n\nnew_indep_var = pd.DataFrame({\"Constant\":np.ones(len(indep_var))}).join(pd.DataFrame(indep_var))\nMSE = (sum((dep_var-predictions)**2))/(len(new_indep_var)-len(new_indep_var.columns))\n\nvar_b = MSE*(np.linalg.inv(np.dot(new_indep_var.T,new_indep_var)).diagonal())\nsd_b = np.sqrt(var_b)\nts_b = params/ sd_b\n\np_values =[2*(1-stats.t.cdf(np.abs(i),(len(new_indep_var)-1))) for i in ts_b]\n\nsd_b = np.round(sd_b,3)\nts_b = np.round(ts_b,3)\np_values = np.round(p_values,3)\nparams = np.round(params,4)\n\np_test_df = pd.DataFrame()\np_test_df[\"Coefficients\"],p_test_df[\"Standard Errors\"],p_test_df[\"t values\"],p_test_df[\"Probabilites\"] = [params,sd_b,ts_b,p_values]\nprint(p_test_df)\n\n# predict\ndep_var_pred = model.predict(indep_var)\n\nprint(r2_score(dep_var, dep_var_pred))\n\n#low r2 value, despite low p-val, t-statistic lookup conclusive, we look for precise predictions for upcoming ML section, it is statistically safe to disregard hoa inferentially\n",
" Coefficients Standard Errors t values Probabilites\n0 1.216295e+06 44792.450 27.154 0.0\n1 -1.652443e+03 216.527 -7.632 0.0\n0.22817970954403455\n"
],
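[
"# Editor-added cross-check (a sketch, not part of the original analysis): the manual\n# standard-error / t-value / p-value computation above can be reproduced with statsmodels,\n# which the previous cell already imports as `sm`. Reuses `dep_var` and `indep_var` from that cell.\nimport statsmodels.api as sm\n\nols_model = sm.OLS(dep_var, sm.add_constant(indep_var)).fit()\nprint(ols_model.summary())  # the coefficient table should agree with p_test_df above",
"_____no_output_____"
],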
[
"train['price']=np.log1p(train['price'])\n",
"_____no_output_____"
],
[
"plt.subplots(figsize=(20,8))\nsns.distplot(train['price'],fit=stats.norm)\n\n\n(mu,sigma)=stats.norm.fit(train['price'])\n\nplt.legend(['Normal Distribution Params mu={} and sigma={}'.format(mu,sigma)],loc='best')\nplt.ylabel('frequency')\n\nfig=plt.figure()\nstats.probplot(train['price'],plot=plt)\n\nplt.show()",
"_____no_output_____"
],
[
"# cols=['zip','type','beds','baths','sqrft','lot','$/sqrft','train_built']\n\ncols=['zip','train_built','type','beds','baths','sqrft','lot']\nx=train[cols]\ny=train['price']\n\n",
"_____no_output_____"
],
[
"# y=np.log1p(y)\n\n# plt.subplots(figsize=(20,8))\n# sns.distplot(y,fit=stats.norm)\n\n\n# (mu,sigma)=stats.norm.fit(y)\n\n# plt.legend(['Normal Distribution Params mu={} and sigma={}'.format(mu,sigma)],loc='best')\n# plt.ylabel('frequency')\n\n# fig=plt.figure()\n# stats.probplot(y,plot=plt)\n\n# plt.show()\n",
"_____no_output_____"
],
[
"train['$/sqrft']=np.log1p(train['$/sqrft'])\n\n\nplt.subplots(figsize=(5,5))\nsns.distplot(train['$/sqrft'],fit=stats.norm)\n\n\n(mu,sigma)=stats.norm.fit(train['$/sqrft'])\n\nplt.legend(['Normal Distribution Params mu={} and sigma={}'.format(mu,sigma)],loc='best')\nplt.ylabel('frequency')\n\nfig=plt.figure()\nstats.probplot(train['$/sqrft'],plot=plt)\n\nplt.show()",
"_____no_output_____"
],
[
"train['sqrft']=np.log1p(train['sqrft'])\n\n\nplt.subplots(figsize=(5,5))\nsns.distplot(train['sqrft'],fit=stats.norm)\n\n\n(mu,sigma)=stats.norm.fit(train['sqrft'])\n\nplt.legend(['Normal Distribution Params mu={} and sigma={}'.format(mu,sigma)],loc='best')\nplt.ylabel('frequency')\n\nfig=plt.figure()\nstats.probplot(train['sqrft'],plot=plt)\n\nplt.show()",
"_____no_output_____"
],
[
"train['lot']=np.log1p(train['lot'])\n\n\nplt.subplots(figsize=(5,5))\nsns.distplot(train['lot'],fit=stats.norm)\n\n\n(mu,sigma)=stats.norm.fit(train['lot'])\n\nplt.legend(['Normal Distribution Params mu={} and sigma={}'.format(mu,sigma)],loc='best')\nplt.ylabel('frequency')\n\nfig=plt.figure()\nstats.probplot(train['lot'],plot=plt)\n\nplt.show()",
"_____no_output_____"
],
[
"train[cols].head()",
"_____no_output_____"
],
[
"x.head()",
"_____no_output_____"
],
[
"x=pd.get_dummies(x,columns=['zip','type','train_built'])\nx.head()",
"_____no_output_____"
],
[
"x.columns",
"_____no_output_____"
],
[
"from sklearn.model_selection import train_test_split\nx_train, x_test, y_train, y_test=train_test_split(x,y,test_size=0.20,random_state=42)\n\nfrom sklearn.linear_model import LinearRegression\nfrom sklearn.metrics import r2_score, mean_squared_error, accuracy_score",
"_____no_output_____"
],
[
"# define the model\nmodel = LinearRegression()\n\n# fit the model to training data\nmodel.fit(x_train, y_train)\n\n# predict\ny_train_pred = model.predict(x_train)\ny_test_pred = model.predict(x_test)",
"_____no_output_____"
],
[
"print(\"The R^2 score for training data is\", r2_score(y_train, y_train_pred))\nprint(\"The R^2 score for testing data is\", r2_score(y_test, y_test_pred))",
"The R^2 score for training data is 0.8443764429409215\nThe R^2 score for testing data is 0.7652239105023699\n"
],
[
"print(\"The train RMSE is \", mean_squared_error(y_train, y_train_pred)**0.5)\nprint(\"The test RMSE is \", mean_squared_error(y_test, y_test_pred)**0.5)",
"The train RMSE is 0.19174176633563975\nThe test RMSE is 0.20871471443293074\n"
],
[
"dff=pd.DataFrame({\"true_values\": y_train, \"predicted\": y_train_pred, \"residuals\": y_train - y_train_pred})\ndff",
"_____no_output_____"
]
],
[
[
"# Check normality of residuals for IV",
"_____no_output_____"
]
],
[
[
"plt.subplots(figsize=(5,5))\n# plt.subplots(1,2,sharex='none')\n# sns.distplot(dff['residuals'],fit=stats.norm)\n# plt.subplots(1,2,sharex='none')\n# stats.probplot(dff['residuals'],plot=plt)\n\n# fig, (ax1, ax2) = plt.subplots(ncols=2, sharex=False,sharey=False)\nsns.distplot(dff['residuals'],fit=stats.norm)\n(mu,sigma)=stats.norm.fit(dff['residuals'])\n\nplt.legend(['Normal Distribution Params mu={} and sigma={}'.format(mu,sigma)],loc='best')\nplt.ylabel('frequency')\n\nfig=plt.figure()\nstats.probplot(dff['residuals'],plot=plt)\n\nplt.show()",
"_____no_output_____"
],
[
"dff['true_values'].max()",
"_____no_output_____"
],
[
"from sklearn.linear_model import Lasso, Ridge, ElasticNet",
"_____no_output_____"
],
[
"# define the model\nlasso = Lasso(random_state=42)\n\n# fir the model to the data\nlasso.fit(x_train, y_train)\n\n# predictions\ny_pred_lasso = lasso.predict(x_test)\n\nRMSE_lasso = mean_squared_error(y_test, y_pred_lasso)**0.5\nr2_lasso = r2_score(y_test, y_pred_lasso)\n\nprint(RMSE_lasso)\nprint(r2_lasso)",
"0.26545426372512426\n0.62022432934567\n"
],
[
"# define the model\nridge = Ridge(random_state=42)\n\n# fir the model to the data\nridge.fit(x_train, y_train)\n\ny_train_pred=ridge.predict(x_train) ##this one\n# predictions\ny_pred_ridge = ridge.predict(x_test)\n\nRMSE_ridge = mean_squared_error(y_test, y_pred_ridge)**0.5\nr2_ridge = r2_score(y_test, y_pred_ridge)\n\nRMSE_ridge_train = mean_squared_error(y_train, y_train_pred)**0.5 #this\nr2_train=r2_score(y_train, y_train_pred) #this\n\nprint(RMSE_ridge)\nprint(r2_ridge)\n\nprint(RMSE_ridge_train)\nprint(r2_train)",
"0.20404662904129794\n0.775608409237964\n0.1925948888994726\n0.8429885207328893\n"
],
[
"ridge",
"_____no_output_____"
],
[
"np.expm1(model.predict(x_test.iloc[0].values.reshape(1,-1)))",
"_____no_output_____"
],
[
"x_test.iloc[0]",
"_____no_output_____"
],
[
"np.expm1(y_test.iloc[0])",
"_____no_output_____"
],
[
"# import pickle\n# ridge_pickle_t = open(\"costamesa_model.pkl\",\"wb\")\n# pickle.dump(ridge, ridge_pickle_t)",
"_____no_output_____"
],
[
"ridge_model = open(\"costamesa_model.pkl\",\"rb\")\nridge = pickle.load(ridge_model)",
"_____no_output_____"
],
[
"beds = []\nbaths = []\nsqrft = []\nlot = []\n# per_sqrft = []\nzipcode = \"\"\ntypes = \"\"\nyear_built=\"\"\n\nbeds.append(input(\"Bedrooms: \"))\nbaths.append(input(\"Bathrooms: \"))\nsqrft.append(input(\"Squarefeet: \"))\nlot.append(input(\"Lot Size: \"))\n# per_sqrft.append(input(\"$'s per Square Feet': \"))\ncity=input(\"City: \")\nzipcode = input(\"Zipcode: \")\ntypes = input(\"House Type: \")\nyear_built=input(\"Built: \")\n",
"Bedrooms: 3\nBathrooms: 2\nSquarefeet: 2000\nLot Size: 3000\nCity: costa mesa\nZipcode: 92626\nHouse Type: sfr\nBuilt: 2000\n"
],
[
"int_year_built=int(year_built)\n",
"_____no_output_____"
],
[
"# def min_built():\n# infile=open('irvine_data.pk1','rb')\n# train=pickle.load(infile)\n# f=open('whatever')\n# train_tust=pickle.load(f)\n \n# #for integrating: load all pickle files\n# #output is a list of minimums\n \n# irvine_mini=train['built'].min()\n# tustin_mini=train_tustin['built'].min()\n \n# return [irvine_mini,tustin_mini]\ntemp=min_built()\ndef temp_bin(num):\n temp_yr_bin=round((num-temp)/10,0)\n return temp_yr_bin\n\n# def binned_year(num):\n \n# minimums=min_built()\n \n# if city==\"Irvine\" or city==\"irvine\":\n# city_min=minimum[0]\n# elif city==\"tustin\" or 'Tustin'\n# city_min=minimum[1]\n# #etc\n\n# binned_yr=round((num-city_min)/10,0)\n \n# return binned_yr\nprint(temp_bin(int_year_built))\nprint(type(temp_bin(int_year_built)))",
"9.0\n<class 'float'>\n"
],
[
"user_dictionary={'zip':zipcode,'type':types,'train_built':str(temp_bin(int_year_built)),'beds':beds,'baths':baths,'sqrft':sqrft,'lot':lot}\nuser_df=pd.DataFrame(user_dictionary)\nuser_df_fit=pd.get_dummies(user_df,columns=['zip','type','train_built'])",
"_____no_output_____"
],
[
"type(user_dictionary['train_built'])",
"_____no_output_____"
],
[
"user_df_fit",
"_____no_output_____"
],
[
"x.columns\n\nfor i in x.columns:\n if i in user_df_fit.columns:\n pass\n else:\n user_df_fit[i]=0\n\n\nuser_df_fit",
"_____no_output_____"
],
[
"user_df_fit.columns",
"_____no_output_____"
],
[
"x.columns",
"_____no_output_____"
],
[
"# np.expm1(ridge.predict(user_df_fit))\nnp.expm1(ridge.predict(user_df_fit))\n\n\n# np.expm1(model.predict(x_test.iloc[0].values.reshape(1,-1)))",
"_____no_output_____"
]
]
] |
[
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a65ebf115773a659d5b2050663ebf74b80cfd28
| 51,904 |
ipynb
|
Jupyter Notebook
|
calaccess-exploration/late-contributions.ipynb
|
california-civic-data-coalition/python-calacess-notebooks
|
7c824d77fca49ad98f118df474fbedcfe3b09116
|
[
"MIT"
] | 21 |
2016-10-03T14:42:19.000Z
|
2020-08-03T01:50:04.000Z
|
calaccess-exploration/late-contributions.ipynb
|
california-civic-data-coalition/python-calacess-notebooks
|
7c824d77fca49ad98f118df474fbedcfe3b09116
|
[
"MIT"
] | 4 |
2016-09-24T03:40:17.000Z
|
2017-02-13T15:59:36.000Z
|
calaccess-exploration/late-contributions.ipynb
|
california-civic-data-coalition/python-calacess-notebooks
|
7c824d77fca49ad98f118df474fbedcfe3b09116
|
[
"MIT"
] | 8 |
2016-10-02T17:14:15.000Z
|
2022-01-14T00:29:29.000Z
| 29.143178 | 481 | 0.312654 |
[
[
[
"# Late contributions Received and Made",
"_____no_output_____"
],
[
"## Setup",
"_____no_output_____"
]
],
[
[
"%load_ext sql",
"_____no_output_____"
],
[
"from django.conf import settings\nconnection_string = 'postgresql+psycopg2://{USER}:{PASSWORD}@{HOST}:{PORT}/{NAME}'.format(\n **settings.DATABASES['default']\n)\n%sql $connection_string",
"_____no_output_____"
]
],
[
[
"## Unique Composite Key",
"_____no_output_____"
],
[
"The documentation says that the records are unique on the following fields:\n* `FILING_ID`\n* `AMEND_ID`\n* `LINE_ITEM`\n* `REC_TYPE`\n* `FORM_TYPE`\n\n`REC_TYPE` is always the same value: `S497`, so we can ignore this column. \n\n`FORM_TYPE` is either `F497P1` or `F497P2`, indicating in whether itemized transaction is listed under Part 1 (Contributions Received) or Part 2 (Contributions Made). I'll split these up into separate tables.",
"_____no_output_____"
],
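[
"%%sql\n-- Editor-added sketch: verify the claims above that REC_TYPE is constant and that\n-- FORM_TYPE only takes the Part 1 / Part 2 values.\nSELECT \"REC_TYPE\", \"FORM_TYPE\", COUNT(*)\nFROM \"S497_CD\"\nGROUP BY 1, 2\nORDER BY 1, 2;",
"_____no_output_____"
],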
[
"## Are the `S497_CD` records actually unique on `FILING_ID`, `AMEND_ID` and `LINE_ITEM`?",
"_____no_output_____"
],
[
"Yes. And this is even true across the Parts 1 and 2 (Contributions Received and Contributions Made).",
"_____no_output_____"
]
],
[
[
"%%sql\nSELECT \"FILING_ID\", \"AMEND_ID\", \"LINE_ITEM\", COUNT(*)\nFROM \"S497_CD\"\nGROUP BY 1, 2, 3\nHAVING COUNT(*) > 1\nORDER BY COUNT(*) DESC;",
"0 rows affected.\n"
]
],
[
[
"## `TRAN_ID`",
"_____no_output_____"
],
[
"The `S497_CD` table includes a `TRAN_ID` field, which the [documentation](http://calaccess.californiacivicdata.org/documentation/calaccess-files/s497-cd/#fields) describes as a \"Permanent value unique to this item\".",
"_____no_output_____"
],
[
"### Is `TRAN_ID` ever `NULL` or blank?",
"_____no_output_____"
],
[
"No.",
"_____no_output_____"
]
],
[
[
"%%sql\nSELECT COUNT(*)\nFROM \"S497_CD\"\nWHERE \"TRAN_ID\" IS NULL OR \"TRAN_ID\" = '' OR \"TRAN_ID\" = '0';",
"1 rows affected.\n"
]
],
[
[
"### Is `TRAN_ID` unique across filings?",
"_____no_output_____"
],
[
"Decidedly no.",
"_____no_output_____"
]
],
[
[
"%%sql\nSELECT \"TRAN_ID\", COUNT(DISTINCT \"FILING_ID\") \nFROM \"S497_CD\"\nGROUP BY 1\nHAVING COUNT(DISTINCT \"FILING_ID\") > 1\nORDER BY COUNT(DISTINCT \"FILING_ID\") DESC\nLIMIT 100;",
"100 rows affected.\n"
]
],
[
[
"But `TRAN_ID` does appear to be unique within each filing amendment, and appears to be reused for each filing.",
"_____no_output_____"
]
],
[
[
"%%sql\nSELECT \"FILING_ID\", \"TRAN_ID\", COUNT(DISTINCT \"AMEND_ID\") AS amend_count, COUNT(*) AS row_count\nFROM \"S497_CD\"\nGROUP BY 1, 2\nORDER BY COUNT(*) DESC\nLIMIT 100;",
"100 rows affected.\n"
]
],
[
[
"There's one exception:",
"_____no_output_____"
]
],
[
[
"%%sql\nSELECT \"FILING_ID\", \"TRAN_ID\", \"AMEND_ID\", COUNT(*)\nFROM \"S497_CD\"\nGROUP BY 1, 2, 3\nHAVING COUNT(*) > 1;",
"1 rows affected.\n"
]
],
[
[
"Looks like this `TRAN_ID` is duplicated across the two parts of the filing. So it was both a contribution both made and received?",
"_____no_output_____"
]
],
[
[
"%%sql\nSELECT *\nFROM \"S497_CD\"\nWHERE \"FILING_ID\" = 2072379\nAND \"TRAN_ID\" = 'EXP9671';",
"2 rows affected.\n"
]
],
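[
[
"%%sql\n-- Editor-added sketch supporting the conclusion below: if TRAN_ID can stand in for\n-- LINE_ITEM within each part of the Schedule 497, then FILING_ID, AMEND_ID, FORM_TYPE\n-- and TRAN_ID together should identify each itemized transaction.\nSELECT \"FILING_ID\", \"AMEND_ID\", \"FORM_TYPE\", \"TRAN_ID\", COUNT(*)\nFROM \"S497_CD\"\nGROUP BY 1, 2, 3, 4\nHAVING COUNT(*) > 1;",
"_____no_output_____"
]
],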
[
[
"Looking at the [PDF for the filing](http://cal-access.ss.ca.gov/PDFGen/pdfgen.prg?filingid=2072379&amendid=1), it appears to be a check from the California Psychological Association PAC to the McCarty for Assembly 2016 committee, which was given and returned on 8/25/2016.\n\nRegardless, because the combinations of `FILING_ID`, `AMEND_ID` and `TRAN_ID` are unique within each part of the Schedule 497, we could substitute `TRAN_ID` for `LINE_ITEM` in the composite key when splitting up the contributions received from the contributions made.\n\nThe advantage is that the `TRAN_ID` purportedly points to the same contribution from one amendment to the next, whereas the same `LINE_ITEM` might not because the filers don't necessarily list transactions on the same line from one filing amendment to the next.\n\nHere's an example: On the [original Schedule 497 filing](http://cal-access.ss.ca.gov/PDFGen/pdfgen.prg?filingid=2083478&amendid=0) for Steven Bradford for Senate 2016, a $8,500.00 contribution from an AFL-CIO sub-committee is listed on line 1. But on the [first](http://cal-access.ss.ca.gov/PDFGen/pdfgen.prg?filingid=2083478&amendid=1) and [second](http://cal-access.ss.ca.gov/PDFGen/pdfgen.prg?filingid=2083478&amendid=2) amendments to the filing, it is listed on line 4.\n",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
]
] |
4a65f3e885c521106dd0a07843504b9c88a968b8
| 11,491 |
ipynb
|
Jupyter Notebook
|
notebooks/basics.ipynb
|
rmarcontel/ib_insync
|
b5d040b014850b0819af7b45bba7fae8d069137c
|
[
"BSD-2-Clause"
] | null | null | null |
notebooks/basics.ipynb
|
rmarcontel/ib_insync
|
b5d040b014850b0819af7b45bba7fae8d069137c
|
[
"BSD-2-Clause"
] | null | null | null |
notebooks/basics.ipynb
|
rmarcontel/ib_insync
|
b5d040b014850b0819af7b45bba7fae8d069137c
|
[
"BSD-2-Clause"
] | null | null | null | 32.831429 | 1,836 | 0.610391 |
[
[
[
"# Basics\n\nLet's first take a look at what's inside the ``ib_insync`` package:",
"_____no_output_____"
]
],
[
[
"import ib_insync\nprint(ib_insync.__all__)",
"['util', 'Event', 'SoftDollarTier', 'PriceIncrement', 'Execution', 'CommissionReport', 'BarDataList', 'RealTimeBarList', 'BarData', 'RealTimeBar', 'HistogramData', 'NewsProvider', 'DepthMktDataDescription', 'ScannerSubscription', 'ScanDataList', 'FundamentalRatios', 'ExecutionFilter', 'PnL', 'PnLSingle', 'AccountValue', 'TickData', 'TickByTickAllLast', 'TickByTickBidAsk', 'TickByTickMidPoint', 'HistoricalTick', 'HistoricalTickBidAsk', 'HistoricalTickLast', 'TickAttrib', 'TickAttribBidAsk', 'TickAttribLast', 'MktDepthData', 'DOMLevel', 'TradeLogEntry', 'FamilyCode', 'SmartComponent', 'PortfolioItem', 'Position', 'Fill', 'OptionComputation', 'OptionChain', 'Dividends', 'NewsArticle', 'HistoricalNews', 'NewsTick', 'NewsBulletin', 'ConnectionStats', 'Contract', 'Stock', 'Option', 'Future', 'ContFuture', 'Forex', 'Index', 'CFD', 'Commodity', 'Bond', 'FuturesOption', 'MutualFund', 'Warrant', 'Bag', 'Crypto', 'TagValue', 'ComboLeg', 'DeltaNeutralContract', 'ContractDetails', 'ContractDescription', 'ScanData', 'Trade', 'Order', 'OrderStatus', 'OrderState', 'OrderComboLeg', 'LimitOrder', 'MarketOrder', 'StopOrder', 'StopLimitOrder', 'BracketOrder', 'OrderCondition', 'ExecutionCondition', 'MarginCondition', 'TimeCondition', 'PriceCondition', 'PercentChangeCondition', 'VolumeCondition', 'Ticker', 'IB', 'Client', 'RequestError', 'Wrapper', 'FlexReport', 'FlexError', 'IBC', 'IBController', 'Watchdog']\n"
]
],
[
[
"### Importing\nThe following two lines are used at the top of all notebooks. The first line imports everything and the second\nstarts an event loop to keep the notebook live updated:",
"_____no_output_____"
]
],
[
[
"from ib_insync import *\nutil.startLoop()",
"_____no_output_____"
]
],
[
[
"*Note that startLoop() only works in notebooks, not in regular Python programs.*",
"_____no_output_____"
],
[
"### Connecting\nThe main player of the whole package is the \"IB\" class. Let's create an IB instance and connect to a running TWS/IBG application:",
"_____no_output_____"
]
],
[
[
"ib = IB()\nib.connect('127.0.0.1', 4002, clientId=12)",
"_____no_output_____"
]
],
[
[
"If the connection failed, then verify that the application has the API port enabled and double-check the hostname and port. For IB Gateway the default port is 4002. Make sure the clientId is not already in use.\n\nIf the connection succeeded, then ib will be synchronized with TWS/IBG. The \"current state\" is now available via methods such as ib.positions(), ib.trades(), ib.openTrades(), ib.accountValues() or ib.tickers(). Let's list the current positions:",
"_____no_output_____"
]
],
[
[
"ib.positions()",
"_____no_output_____"
]
],
[
[
"Or filter the account values to get the liquidation value:",
"_____no_output_____"
]
],
[
[
"[v for v in ib.accountValues() if v.tag == 'NetLiquidationByCurrency' and v.currency == 'BASE']",
"_____no_output_____"
]
],
[
[
"The \"current state\" will automatically be kept in sync with TWS/IBG. So an order fill will be added as soon as it is reported, or account values will be updated as soon as they change in TWS.",
"_____no_output_____"
],
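[
"# Editor-added sketch (assumes the connection made above is still active; the handler below\n# is illustrative and not part of the original notebook): the live sync can be observed by\n# attaching a callback to one of the IB events, for example positionEvent.\ndef onPosition(position):\n    print('Position update:', position)\n\nib.positionEvent += onPosition   # called whenever TWS/IBG reports a position change\n# ib.positionEvent -= onPosition  # detach the handler again when it is no longer needed",
"_____no_output_____"
],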
[
"### Contracts\n\nContracts can be specified in different ways:\n* The ibapi way, by creating an empty Contract object and setting its attributes one by one;\n* By using Contract and giving the attributes as keyword argument;\n* By using the specialized Stock, Option, Future, Forex, Index, CFD, Commodity,\n Bond, FuturesOption, MutualFund or Warrant contracts.\n\nSome examples:",
"_____no_output_____"
]
],
[
[
"Contract(conId=270639)\nStock('AMD', 'SMART', 'USD')\nStock('INTC', 'SMART', 'USD', primaryExchange='NASDAQ')\nForex('EURUSD')\nCFD('IBUS30')\nFuture('ES', '20180921', 'GLOBEX')\nOption('SPY', '20170721', 240, 'C', 'SMART')\nBond(secIdType='ISIN', secId='US03076KAA60');",
"_____no_output_____"
]
],
[
[
"### Sending a request\n\nThe IB class has nearly all request methods that the IB API offers. The methods that return a result will block until finished and then return the result. Take for example reqContractDetails:",
"_____no_output_____"
]
],
[
[
"contract = Stock('TSLA', 'SMART', 'USD')\nib.reqContractDetails(contract)",
"_____no_output_____"
]
],
[
[
"### Current state vs request\n\nDoing a request involves network traffic going up and down and can take considerable time. The current state on the other hand is always immediately available. So it is preferable to use the current state methods over requests. For example, use ``ib.openOrders()`` in preference over ``ib.reqOpenOrders()``, or ``ib.positions()`` over ``ib.reqPositions()``, etc:",
"_____no_output_____"
]
],
[
[
"%time l = ib.positions()",
"CPU times: user 5 µs, sys: 3 µs, total: 8 µs\nWall time: 9.06 µs\n"
],
[
"%time l = ib.reqPositions()",
"CPU times: user 0 ns, sys: 745 µs, total: 745 µs\nWall time: 32.7 ms\n"
]
],
[
[
"### Logging\n\nThe following will put log messages of INFO and higher level under the current active cell:",
"_____no_output_____"
]
],
[
[
"util.logToConsole()",
"_____no_output_____"
]
],
[
[
"To see all debug messages (including network traffic):",
"_____no_output_____"
]
],
[
[
"import logging\nutil.logToConsole(logging.DEBUG)",
"_____no_output_____"
]
],
[
[
"### Disconnecting\n\nThe following will disconnect ``ib`` and clear all its state:",
"_____no_output_____"
]
],
[
[
"ib.disconnect()",
"2019-12-31 13:28:29,252 ib_insync.ib INFO Disconnecting from 127.0.0.1:7497, 160 B sent in 9 messages, 21.0 kB received in 418 messages, session time 920 ms.\n2019-12-31 13:28:29,255 ib_insync.client INFO Disconnecting\n"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a65fea1e631e48086d3002a7670c9ab06e611a5
| 47,348 |
ipynb
|
Jupyter Notebook
|
solutions/Practical 2. More loops and conditionals.ipynb
|
eleanorrosalind/DEES_programming_course
|
d887aa661c3d1832b8577acdb14c8becbd09b877
|
[
"CC0-1.0"
] | null | null | null |
solutions/Practical 2. More loops and conditionals.ipynb
|
eleanorrosalind/DEES_programming_course
|
d887aa661c3d1832b8577acdb14c8becbd09b877
|
[
"CC0-1.0"
] | null | null | null |
solutions/Practical 2. More loops and conditionals.ipynb
|
eleanorrosalind/DEES_programming_course
|
d887aa661c3d1832b8577acdb14c8becbd09b877
|
[
"CC0-1.0"
] | 1 |
2020-02-28T21:37:43.000Z
|
2020-02-28T21:37:43.000Z
| 57.461165 | 17,676 | 0.706809 |
[
[
[
"# Practical 2 - Loops and conditional statements\n\nIn today's practical we are going to continue practicing working with loops whilst also moving on to the use of conditional statements.\n\n<div class=\"alert alert-block alert-success\">\n<b>Objectives:</b> The objectives of todays practical are:\n\n - 1) [Loops: FOR loops continued](#Part1)\n * [Exercise 1: Cycling through arrays and modifying values](#Exercise1)\n - 2) [Conditional statements: IF, ELSE and ELIF](#Part2)\n * [Exercise 2: Modify a loop to implement one of two equations according to a condition being met](#Exercise2)\n - 3) [Nested loops: Working with more than 1 dimension](#Part3)\n * [Exercise 3: Print out the result from a nested loop according to a condition being met](#Exercise3)\n * [Exercise 4: Print out which variables match a condition](#Exercise4)\n * [Exercise 5: Repeat Bob Newby's code breaking nested loops to crack the code in the Hawkins lab](#Exercise5)\n\nPlease note that you should not feel pressured to complete every exercise in class. These practicals are designed for you to take outside of class and continue working on them. Proposed solutions to all exercises can be found in the 'Solutions' folder.\n</div>",
"_____no_output_____"
],
[
"<div class=\"alert alert-block alert-warning\">\n<b>Please note:</b> After reading the instructions and aims of any exercise, search the code snippets for a note that reads ------'INSERT CODE HERE'------ to identify where you need to write your code \n</div>",
"_____no_output_____"
],
[
"## 1) Loops: FOR loops continued <a name=\"Part1\">\n\nLet us jump straight into our first exercise, following on from the previous practical.\n\n<div class=\"alert alert-block alert-success\">\n<b> Exercise 1: Cycling through arrays and modifying values. <a name=\"Exercise1\"> </b> Create a loop that implements the function:\n\n\\begin{eqnarray} \nY = X^{2.8/X}\n\\end{eqnarray}\n\nWhere x is an array of 75 values from 10 to 85. Print the final 4 values of the X and Y array one-by-one. Your output should look something like:\n\n```python\nThe 72nd element of x is 82\nThe 72nd element of y is 1.1623843156991178\nThe 73rd element of x is 83\nThe 73rd element of y is 1.1607534518329277\nThe 74th element of x is 84\nThe 74th element of y is 1.1591580160090038\nThe 75th element of x is 85\nThe 75th element of y is 1.157596831308393\n```\n</div>\n\n",
"_____no_output_____"
]
],
[
[
"# Initiliase an empty list for both 'x' and 'y'\nx = []\ny = []\n\n# Now loop through 75 values and append each list accordingly.\n# One list contained values for 'x', the other 'y'.\n# Please note the operator ** is needed to raise one number to another [e.g 2**3]\n#------'INSERT CODE HERE'------\nfor step in range(75):\n \n # Append a value to our x array\n x.append(10+(step+1))\n \n # Append a value to our y array\n y.append(x[step]**(2.8/x[step]))\n#------------------------------\n \n# Print the last four values from both our x and y arrays\nprint(\"The 72nd element of x is \",x[71])\nprint(\"The 72nd element of y is \",y[71])\n\nprint(\"The 73rd element of x is \",x[72])\nprint(\"The 73rd element of y is \",y[72])\n\nprint(\"The 74th element of x is \",x[73])\nprint(\"The 74th element of y is \",y[73])\n\nprint(\"The 75th element of x is \",x[74])\nprint(\"The 75th element of y is \",y[74])\n\n",
"The 72nd element of x is 82\nThe 72nd element of y is 1.1623843156991178\nThe 73rd element of x is 83\nThe 73rd element of y is 1.1607534518329277\nThe 74th element of x is 84\nThe 74th element of y is 1.1591580160090038\nThe 75th element of x is 85\nThe 75th element of y is 1.157596831308393\n"
]
],
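[
[
"# A more compact way to write the same calculation, just as an aside: list comprehensions\n# for x and y, and a short loop for the four print statements at the end.\nx = [step + 11 for step in range(75)]\ny = [xi**(2.8/xi) for xi in x]\n\nfor i in range(71, 75):\n    print(\"Element\", i + 1, \"of x is\", x[i], \"and of y is\", y[i])",
"_____no_output_____"
]
],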
[
[
"## 2) Conditional statements: The IF, ELIF and ELSE statements <a name=\"Part2\">\n\nOnce we have information stored in an array, or wish to generate information iteratively, we start to use a combination of **loops** and **conditional statements**. Conditional statements allow us to develop software that can be responsive to certain conditions. For example, in the following control flow diagram we define a set of instructions that initiate a variable and start a loop that adds 3 to this variable at each iteration. However, at each iteration we also check the value of said variable and if it becomes equals or greater than 30, we stop the program.\n\n\n\nThe following table lists the Python equivalent of common mathematical symbols to check numerical values.\n\n| Meaning | Math Symbol | Python Symbols |\n| --- | --- | --- |\n| Less than\t | < | < | \n| Greater than\t | > | > | \n| Less than or equal | ≤ | <= | \n| Greater than or equal | ≥ | >= | \n| Equals | = | == | \n| Not equal | ≠ | != | \n\n<div class=\"alert alert-block alert-danger\">\n<b> Warning </b> The obvious choice for equals, a single equal sign, is not used to check for equality. It is a common error to use only one equal sign when you mean to test for equality, and not make an assignment!\n</div>\n\nHow do we implement checks using these symbols? This is where we use the IF, ELIF and ELSE statements. Let us start with an example. \n\n```python\n# Initialise two variables with integer values\nx=3\ny=5\n# Now use an IF statement to check the relative values of x and y, then act accordingly\nif x > y:\n print(\"X is greater than Y\")\n... \nif x < y:\n print(\"X is less than Y\")\n...\"X is less than Y\"\n```\n\nOnce again, notice how we have introduced a statement that ends with a colon : and thus requires the next line to be indented. We also use specific symbols to check whether one value is greater than [>] or less than [<] another. Within each condition check, depending on which is true, we print a message to the screen.\n\nRather than use two IF statements, we could combine these checks using an ELSE statement as follows:\n\n```python\n# Initialise two variables with integer values\nx=3\ny=5\n# Now use an IF statement to check the relative values of x and y, then act accordingly\nif x > y:\n print(\"X is greater than Y\") \nelse x < y:\n print(\"X is less than Y\")\n\"X is less than Y\"\n```\n\nThere are a huge number of examples we could work on here, but to begin lets build on the first exercise. In the following code we again have two variables 'x' and 'y'. Each has 50 elements. Lets assume that we want to implement two functions: one that is used if our x value is *less than or equal* to 20, the other if x is *greater than* 20. We can use a combination of the IF and ELSE statements.\n\n - If $X$ is *less than or equal* to 20, $ Y = \\frac{X}{12.5} $\n - Otherwise [else], $Y = X^{12.5} $\n\nLets see this implemented as code below. Read through the syntax and if you do not understand, please ask. \n\n\n<div class=\"alert alert-block alert-danger\">\n<b>Indexing </b> Once again, notice how we have introduced a statement that ends with a colon : and thus requires the next line to be indented. \n</div>\n",
"_____no_output_____"
]
],
[
[
"# Initiliase an empty list for both 'x' and 'y'\nx = []\ny = []\n\nfor step in range(50):\n\n # Append a value to our x array\n x.append(step+1)\n \n # Now add a conditional statement to check the value of x\n # Notice our additional indentation\n if x[step] <= 20:\n # Append a value to our y array\n y.append(x[step]/12.5)\n else:\n # Append a value to our y array\n y.append(x[step]**12.5)\n \n# Print the first and last four values from both our x and y arrays\n\n# First four\nprint(\"The 1st element of x is \",x[0])\nprint(\"The 1st element of y is \",y[0])\n\nprint(\"The 2nd element of x is \",x[1])\nprint(\"The 2nd element of y is \",y[1])\n\nprint(\"The 3rd element of x is \",x[2])\nprint(\"The 3rd element of y is \",y[2])\n\nprint(\"The 4th element of x is \",x[3])\nprint(\"The 4th element of y is \",y[3])\n\n# Last four\nprint(\"The 47th element of x is \",x[46])\nprint(\"The 47th element of y is \",y[46])\n\nprint(\"The 48th element of x is \",x[47])\nprint(\"The 48th element of y is \",y[47])\n\nprint(\"The 49th element of x is \",x[48])\nprint(\"The 49th element of y is \",y[48])\n\nprint(\"The 50th element of x is \",x[49])\nprint(\"The 50th element of y is \",y[49])\n",
"The 1st element of x is 1\nThe 1st element of y is 0.08\nThe 2nd element of x is 2\nThe 2nd element of y is 0.16\nThe 3rd element of x is 3\nThe 3rd element of y is 0.24\nThe 4th element of x is 4\nThe 4th element of y is 0.32\nThe 47th element of x is 47\nThe 47th element of y is 7.965686757032835e+20\nThe 48th element of x is 48\nThe 48th element of y is 1.0363715136605023e+21\nThe 49th element of x is 49\nThe 49th element of y is 1.341068619663965e+21\nThe 50th element of x is 50\nThe 50th element of y is 1.7263349150062194e+21\n"
]
],
[
[
" - put exercise here",
"_____no_output_____"
],
[
"### The AND statment\n\nOnce we move beyond two mutually exclusive conditions, we can also use the ELIF statements. However we need to be careful that we are assigning correct boundaries on our conditions. For example, let us assume we have been tasked with creating an array X that contains values from 1 to 200 and we want to implement 3 equations according to the following rules:\n \n - If X is less than 20, use: $ Y = \\frac{X}{1.56} $\n - If X is greater than or equal to 20, but less than 60 use: $ Y = X^{0.35} $ \n - If X is greater than or equal to 60 use: $ Y = 4.5*X $ \n \nLook at the following two different versions of a loop using the conditional statements introduced earlier.\n\n```python\n# Version 1\nif x[step] < 20:\n <<action>> \nelif x[step] >= 20:\n <<action>>\nelif x[step] >= 60\n <<action>>\n```\n```python\n# Version 2\nif x[step] < 20:\n <<action>> \nelif x[step] >= 20 and x[step] < 60:\n <<action>>\nelif x[step] >= 60\n <<action>>\n```\n\nThe first version will work, but produce incorrect results. Why is that? If you follow the code instructions, as the Python interpreter would, once x[step] is greater than 20 the second conditional statement will always be true. As a result, it will never have to move to the third. In the second version however, the second conditional will no longer be true once x[step] is greater than or equal to 60. \n\nLet us run both versions and plot the results so you can see the difference. In the following code I will create two Y arrays, one for each loop variant. A line plot will be produced where you should see a step change in values according to these rules. Do not worry about the syntax or module used to create the plot, we will visit this throughout the course.",
"_____no_output_____"
]
],
[
[
"# Initiliase an empty list for both 'x' and 'y'\nx = []\ny_version1 = []\ny_version2 = []\n\nfor step in range(200):\n\n # Append a value to our x array\n x.append(step+1)\n \n # Version 1\n if x[step] < 20:\n # Append a value to our y array\n y_version1.append(x[step]/1.56)\n elif x[step] >= 20:\n # Append a value to our y array\n y_version1.append(x[step]**0.35)\n elif x[step] >= 60:\n y_version1.append(4.5*x[step])\n \n # Version 2\n if x[step] < 20:\n # Append a value to our y array\n y_version2.append(x[step]/1.56)\n elif x[step] >= 20 and x[step] < 60:\n # Append a value to our y array\n y_version2.append(x[step]**0.35)\n elif x[step] >= 60:\n y_version2.append(4.5*x[step])\n \n# Plot results\nimport matplotlib.pyplot as plt # Import Matplotlib so we can plot results\nimport numpy as np # The Numpy package - more soon!!\nfig = plt.figure(figsize=(8,8))\nax = plt.axes()\nax.plot(np.array(x),np.log(y_version1),label='Version 1')\nax.plot(np.array(x),np.log(y_version2),label='Version 2')\nax.set_title('Y as a function of X')\nax.legend(loc='upper left')\nax.set_ylabel('Y')\nax.set_xlabel('X')\n\n",
"_____no_output_____"
]
],
[
[
"<div class=\"alert alert-block alert-success\">\n<b> Exercise 2: Modify a loop to implement one of three equations according to a condition being met <a name=\"Exercise2\"> </b> In this case, let us assume an array X contains values from 1 to 1000 and we want to implement 3 equations according to the following rules:\n \n - If X is less than or equal to 250, $ Y = X^{1.03} $\n - If X is greater than 250, but less than 690 $ Y = X^{1.25} $ \n - If X is greater than or equal to 690, $ Y = X^{1.3} $ \n\nThis is the first graph we have created. Dont worry about the syntax for now, we will produce graphs in every practical following this.\n\nYour output should look like the following:\n\n\n\n\n</div>\n\n\n",
"_____no_output_____"
]
],
[
[
"# Initiliase an empty list for both 'x' and 'y'\nx = []\ny = []\n\nfor step in range(1000):\n\n # Append a value to our x array\n x.append(step+1)\n \n #------'INSERT CODE HERE'------\n # Now add a conditional statement to check the value of x\n # Notice our additional indentation\n if x[step] <= 250:\n # Append a value to our y array\n y.append(x[step]**1.03)\n elif x[step] > 250 and x[step] < 690:\n y.append(x[step]**1.2)\n elif x[step] >= 690:\n # Append a value to our y array\n y.append(x[step]**2.5)\n #------------------------------\n \n# Print the first and last four values from both our x and y arrays\n\n#Import plotting package\nimport matplotlib.pyplot as plt # Import Matplotlib so we can plot results\nimport numpy as np # The Numpy package - more soon!!\nfig = plt.figure(figsize=(8,8))\nax = plt.axes()\nax.plot(np.array(x),np.log(y))\nax.set_title('Y as a function of X')\nax.set_ylabel('Y')\nax.set_xlabel('X')",
"_____no_output_____"
]
],
[
[
"## 3) Nested loops: Working with more than 1 dimension <a name=\"Part3\">\n\nIn many we want to work with more than one variable at a time, often in a two [or more] dimensional setting. We can combine 'FOR' loops on any number of levels. For example, take the following hypothetical example:\n\n```python\nfor [first iterating variable] in [outer loop]: # Outer loop\n [do something] # Optional\n for [second iterating variable] in [nested loop]: # Nested loop\n [do something] \n```\n\nNotice how we have what we might call our first, or 'outer' loop cycling through our first iterating variable. As we cycle through this variable, we then 'do something' as a direct consequence. However, directly following this action, we cycle through a second iterating variable as part of our 'nested loop'. In other words, we have a loop that is nested within our first, or outer. \n\n<div class=\"alert alert-block alert-danger\">\n<b>Indexing </b> Once again, notice how we have introduced a statement that ends with a colon : and thus requires the next line to be indented. \n</div>\n\nLet us run an example of cycling through a list of words. In this case we are not using the \n```python \nrange() \n```\nfunction as we are not dealing with numeric examples or cycling through integers.",
"_____no_output_____"
]
],
[
[
"# Create two lists of words\nlist1 = ['Hello','Goodbye']\nlist2 = ['George','Frank','Susan','Sarah']\n\nfor word1 in list1:\n for word2 in list2:\n print(word1)\n print(word2)\n ",
"Hello\nGeorge\nHello\nFrank\nHello\nSusan\nHello\nSarah\nGoodbye\nGeorge\nGoodbye\nFrank\nGoodbye\nSusan\nGoodbye\nSarah\n"
]
],
[
[
"Turns out we can make the output easier to read by adding each word together, with a space ' ' in between as:",
"_____no_output_____"
]
],
[
[
"# Create two lists of words\nlist1 = ['Hello','Goodbye']\nlist2 = ['George','Frank','Susan','Sarah']\n\nfor word1 in list1:\n for word2 in list2:\n print(word1+' '+word2)",
"Hello George\nHello Frank\nHello Susan\nHello Sarah\nGoodbye George\nGoodbye Frank\nGoodbye Susan\nGoodbye Sarah\n"
]
],
[
[
"We will not be deadling with the rich text processing power of Python in this course, but if you are interested there are some great examples to follow on the [internet](https://towardsdatascience.com/gentle-start-to-natural-language-processing-using-python-6e46c07addf3). The important lesson here is noticing how we deal with a nested loop. Please also note that we can use a 'FOR' loop to iterate on members of any list, whether they are numeric or string values.\n\nAgain we can use conditional statements to modify our output. What if we only wanted to output entries involving Susan? We can add a conditional statement as follows: ",
"_____no_output_____"
]
],
[
[
"# Create two lists of words\nlist1 = ['Hello','Goodbye']\nlist2 = ['George','Frank','Susan','Sarah']\n\nfor word1 in list1:\n for word2 in list2:\n if word2 == \"Susan\":\n print(word1+' '+word2)",
"Hello Susan\nGoodbye Susan\n"
]
],
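[
[
"# An optional aside: the standard library's itertools.product walks through every combination\n# of the two lists, which is equivalent to the nested loops above.\nfrom itertools import product\n\nlist1 = ['Hello','Goodbye']\nlist2 = ['George','Frank','Susan','Sarah']\n\nfor word1, word2 in product(list1, list2):\n    if word2 == \"Susan\":\n        print(word1+' '+word2)",
"_____no_output_____"
]
],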
[
[
"<div class=\"alert alert-block alert-success\">\n<b> Exercise 3: Print out the result from a nested loop according to a condition being met <a name=\"Exercise3\"> </b> \n \nIn this exercise we have three lists with the following entries:\n \n list1 = ['Maths','Physics','Programming','Chemistry']\n list2 = ['is','can be','is not'] \n list3 = ['enjoyable','awful!','ok, I guess','....'] \n\nYour task is to create a triple nested loop and only print out when the word in list1 is 'Physics' and list2 is 'can be'.\nThere are multiple ways to achieve this.\n\nYour results should look like the following:\n\n```python\nPhysics can be enjoyable\nPhysics can be awful!\nPhysics can be ok, I guess\nPhysics can be ....\n```\n\n</div>\n\n",
"_____no_output_____"
]
],
[
[
"# Create three lists of words\n\n#------'INSERT CODE HERE'------\nlist1 = ['Maths','Physics','Programming','Chemistry']\nlist2 = ['is','can be','is not'] \nlist3 = ['enjoyable','awful!','ok, I guess','....'] \n\nfor word1 in list1:\n for word2 in list2:\n for word3 in list3:\n if word1 == \"Physics\" and word2 == \"can be\":\n print(word1+' '+word2+' '+word3)\n#------------------------------",
"Physics can be enjoyable\nPhysics can be awful!\nPhysics can be ok, I guess\nPhysics can be ....\n"
]
],
[
[
"<div class=\"alert alert-block alert-success\">\n<b> Exercise 4: Print out which variables match a condition <a name=\"Exercise4\"> </b> \n \nIn this exercise we have two variables, 'x' and 'y' taking on a value from two loops that cycle through 80 values.\n\n```python\nfor x in range(80):\n for y in range(80):\n [do something]\n```\n\nYour task is to identify which combinations of x and y through the function:\n\n\\begin{eqnarray} \nZ = Y+X^{2}\n\\end{eqnarray}\n\nproduce a value of Z = 80\n\n</div>\n",
"_____no_output_____"
]
],
[
[
"#------'INSERT CODE HERE'------\nfor x in range(80):\n for y in range(80):\n z = y + x**2.0\n if z == 80:\n print('x = ', x)\n print('y = ', y)\n#------------------------------",
"x = 1\ny = 79\nx = 2\ny = 76\nx = 3\ny = 71\nx = 4\ny = 64\nx = 5\ny = 55\nx = 6\ny = 44\nx = 7\ny = 31\nx = 8\ny = 16\n"
]
],
[
[
"<div class=\"alert alert-block alert-success\">\n<b> Exercise 5: Repeat Bob Newby's code breaking nested loops to crack the code in the Hawkins lab <a name=\"Exercise5\"> </b> \n \nIn this exercise we imagine that we are tasked with finding the value of a password that is different everytime the program is executed. This will be generated by an internal Python function and used to create a string which has 5 numbers in it. We then have to create a 5 level nested loop to combine 5 different numbers into one word and when this matches the one generated by the internal Python function the attempted, thus correct, password is printed to the screen.\n\nThe code box below provides you with indentend lines in which to enter the rest of the code required. The first loop is provided. As part of the 5th loop you will need to combine all of thje individual numbers, as strings, into one word and then check if this is the same as the internally generated password. You can use the following commands for this, assuming that you call each letter as letter1, letter2 etc.\n\n```python\npassword_attempt = letter1+letter2+letter3+letter4+letter5\nif password_attempt == password_string:\n print(\"Passwords match!, attempted password = \",password_attempt)\n```\n\nOnce you have finished, why not see how many steps have been taken to arrive at the correct password? \n\n</div>",
"_____no_output_____"
]
],
[
[
"# The following imports a module [see Practical 3] and then creates a string of a random number of 5 digits\nfrom random import randint\nn=5\npassword_string = ''.join([\"{}\".format(randint(0, 9)) for num in range(0, 5)])\nprint(\"password = \", password_string)\n\n# Now create a 5 level nested loop which prints when the successful password has been met\n\n#------'INSERT CODE HERE'------\n\n# First loop\nfor step1 in range(10):\n letter1 = str(step1) # Convert number to a string\n # Second loop\n for step2 in range(10):\n letter2 = str(step2)\n # Third loop\n for step3 in range(10):\n letter3 = str(step3)\n # Fourth loop\n for step4 in range(10):\n letter4 = str(step4)\n # Fifth loop\n for step5 in range(10):\n letter5 = str(step5)\n password_attempt = letter1+letter2+letter3+letter4+letter5\n \n if password_attempt == password_string:\n print(\"Passwords match!, attempted password = \",password_attempt)\n \n#------------------------------",
"password = 00101\nPasswords match!, attempted password = 00101\n99999\n"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
4a660223a8278db505d828a29a2ab006303d09cf
| 763,936 |
ipynb
|
Jupyter Notebook
|
Horse Race Data - Race time prediction/Step3_Regression.ipynb
|
vszuflita/Metis
|
bc374d1a1bdd89b2a6bebe770c3ac5bd9ace00fe
|
[
"Apache-2.0"
] | 3 |
2018-06-28T16:20:21.000Z
|
2022-02-26T03:40:07.000Z
|
Horse Race Data - Race time prediction/Step3_Regression.ipynb
|
vszuflita/Metis
|
bc374d1a1bdd89b2a6bebe770c3ac5bd9ace00fe
|
[
"Apache-2.0"
] | null | null | null |
Horse Race Data - Race time prediction/Step3_Regression.ipynb
|
vszuflita/Metis
|
bc374d1a1bdd89b2a6bebe770c3ac5bd9ace00fe
|
[
"Apache-2.0"
] | null | null | null | 116.011541 | 205,532 | 0.793924 |
[
[
[
"## Imports",
"_____no_output_____"
]
],
[
[
"from __future__ import print_function, division\n\nimport pandas as pd\nimport numpy as np\nimport statsmodels.api as sm\nimport statsmodels.formula.api as smf\nimport patsy\nimport seaborn as sns\nimport matplotlib.pyplot as plt\nimport scipy.stats as stats\n%matplotlib inline\n\nfrom sklearn.linear_model import LinearRegression\nfrom sklearn.linear_model import RidgeCV\nfrom sklearn.preprocessing import PolynomialFeatures\nfrom sklearn.pipeline import make_pipeline\nfrom sklearn.pipeline import Pipeline\nfrom sklearn import cross_validation\nfrom sklearn.cross_validation import train_test_split\nfrom sklearn.cross_validation import cross_val_score\nfrom sklearn.model_selection import KFold\nfrom sklearn.linear_model import Ridge\nfrom sklearn.linear_model import ElasticNet\nfrom sklearn.linear_model import Lasso\nfrom sklearn.linear_model import ElasticNetCV\nfrom sklearn.linear_model import LassoCV\nfrom sklearn.linear_model import RidgeCV\nfrom sklearn.metrics import mean_squared_error as MSE",
"_____no_output_____"
]
],
[
[
"## Reading and preparing the df",
"_____no_output_____"
]
],
[
[
"horsey = pd.read_csv('finalmerged_clean').drop('Unnamed: 0', axis=1)",
"_____no_output_____"
]
],
[
[
"#### Smaller data set (maiden females)",
"_____no_output_____"
]
],
[
[
"MaidenFems = horsey.iloc[42:49]\nMaidenFems",
"_____no_output_____"
]
],
[
[
"#### Larger data set (without maiden females)",
"_____no_output_____"
]
],
[
[
"horse_fast = horsey.drop(horsey.index[42:49]).reset_index(drop=True)\nhorse_fast",
"_____no_output_____"
],
[
"horse_fast = horse_fast.drop('Final_Time',1).drop('Horse Name',1)\nhorse_fast",
"_____no_output_____"
]
],
[
[
"## Splitting into Master Test-Train",
"_____no_output_____"
]
],
[
[
"ttest = horse_fast.iloc[[1,5,10,15,20,25,30,35,40,45,50]].reset_index(drop=True)",
"_____no_output_____"
],
[
"ttrain = horse_fast.drop(axis = 0, index = [1,5,10,15,20,25,30,35,40,45,50]).sample(frac=1).reset_index(drop=True)",
"_____no_output_____"
],
[
"ttrain",
"_____no_output_____"
],
[
"y_ttrain = ttrain['Final_Time_Hund'] \ny_ttest = ttest['Final_Time_Hund'] #extract dependent variable\n\nX_ttrain = ttrain.drop('Final_Time_Hund',1)\nX_ttest = ttest.drop('Final_Time_Hund',1) # Get rid of ind. variables",
"_____no_output_____"
]
],
[
[
"## Testing Assumptions",
"_____no_output_____"
],
[
"Didn't complete for sake of time",
"_____no_output_____"
],
[
"#### Assumption 1",
"_____no_output_____"
]
],
[
[
"XAssum = X_ttrain\nyAssum = y_ttrain",
"_____no_output_____"
],
[
"XAssum_train, XAssum_test, yAssum_train, yAssum_test = train_test_split(XAssum, yAssum, test_size=0.2)",
"_____no_output_____"
],
[
"def diagnostic_plot(x, y):\n plt.figure(figsize=(20,5))\n \n rgr = LinearRegression()\n rgr.fit(XAssum_train, yAssum_train)\n pred = rgr.predict(XAssum_test, yAssum_test)\n\n#Regression plot\n plt.subplot(1, 3, 1)\n plt.scatter(XAssum_train,yAssum_train)\n plt.plot(XAssum_train, pred, color='blue',linewidth=1)\n plt.title(\"Regression fit\")\n plt.xlabel(\"x\")\n plt.ylabel(\"y\")\n \n \n#Residual plot (true minus predicted)\n plt.subplot(1, 3, 2)\n res = yAssum_train - pred\n plt.scatter(pred, res)\n plt.title(\"Residual plot\")\n plt.xlabel(\"prediction\")\n plt.ylabel(\"residuals\")\n \n#A Q-Q plot (for the scope of today), it's a percentile, percentile plot. When the predicted and actual distributions\n#are the same, they Q-Q plot has a diagonal 45degree line. When stuff diverges, the kertosis between predicted and actual are different, \n#your line gets wonky. \n plt.subplot(1, 3, 3)\n #Generates a probability plot of sample data against the quantiles of a \n # specified theoretical distribution \n stats.probplot(res, dist=\"norm\", plot=plt)\n plt.title(\"Normal Q-Q plot\")\n\ndiagnostic_plot(XAssum_train, yAssum_train)\n\n\nmodelA = ElasticNet(1, l1_ratio=.5)\nfit = modelA.fit(XAssum_train, yAssum_train)\nrsq = fit.score(XAssum_train, yAssum_train)\nadj_rsq = 1 - (1-rsq)*(len(yAssum_train)-1)/(len(yAssum_train)-XAssum_train.shape[1]-1)\n\nprint(rsq)\nprint(adj_rsq)",
"_____no_output_____"
]
],
[
[
"#### Assumption 2",
"_____no_output_____"
]
],
[
[
"# develop OLS with Sklearn\nX = ttrain[1:]\ny = ttrain[0] # predictor\n\nlr = LinearRegression()\nfit = lr.fit(X,y)\n\nt['predict']=fit.predict(X)\ndata['resid']=data.cnt-data.predict\nwith sns.axes_style('white'):\n plot=data.plot(kind='scatter',\n x='predict',y='resid',alpha=0.2,figsize=(10,6))",
"_____no_output_____"
]
],
[
[
"## Model 0 - Linear Regression",
"_____no_output_____"
],
[
"Working with the training data that doesn't include the maiden-filly race. ",
"_____no_output_____"
]
],
[
[
"horsey = ttrain\n",
"_____no_output_____"
],
[
"Xlin = X_ttrain\nylin = y_ttrain",
"_____no_output_____"
]
],
[
[
"#### Regplots",
"_____no_output_____"
]
],
[
[
"sns.regplot('Gender','Final_Time_Hund', data=horsey);\n#Makes sense! Male horses tend to be a little faster. ",
"_____no_output_____"
],
[
"sns.regplot('Firsts','Final_Time_Hund', data=horsey);\n#Makes sense! Horses that have won more races tend to be faster. ",
"_____no_output_____"
],
[
"sns.regplot('Seconds','Final_Time_Hund', data=horsey);\n#Similar to the result for \"firsts\", but slightly less apparent.",
"_____no_output_____"
],
[
"sns.regplot('Thirds','Final_Time_Hund', data=horsey);\n#Similar to the results above. ",
"_____no_output_____"
],
[
"sns.regplot('PercentWin','Final_Time_Hund', data=horsey);\n#Not a great correlation...",
"_____no_output_____"
],
[
"sns.regplot('Starts','Final_Time_Hund', data=horsey);\n#This seems pretty uncorrelated...",
"_____no_output_____"
],
[
"sns.regplot('Date','Final_Time_Hund', data=horsey);\n#Horses with more practice have faster times. But pretty uncorrelated... ",
"_____no_output_____"
],
[
"sns.regplot('ThreeF','Final_Time_Hund', data=horsey);\n#Really no correlation!",
"_____no_output_____"
],
[
"sns.regplot('FourF','Final_Time_Hund', data=horsey);\n#Huh, not great either. ",
"_____no_output_____"
],
[
"sns.regplot('FiveF','Final_Time_Hund', data=horsey);\n#Slower practice time means slower finaltime. But yeah... pretty uncorrelated...",
"_____no_output_____"
]
],
[
[
"#### Correlations",
"_____no_output_____"
]
],
[
[
"horsey.corr()",
"_____no_output_____"
],
[
"%matplotlib inline\nimport matplotlib\nmatplotlib.rcParams[\"figure.figsize\"] = (12, 10)",
"_____no_output_____"
],
[
"sns.heatmap(horsey.corr(), vmin=-1,vmax=1,annot=True, cmap='seismic');",
"_____no_output_____"
]
],
[
[
"Pretty terrible... but it seems like FiveF, Date, Gender and Percent win are the best... (in that order). ",
"_____no_output_____"
]
],
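[
[
"# A quick check of the claim above: rank the features by the absolute value of their\n# correlation with the final time (assumes `horsey` from the cells above is still in scope).\nhorsey.corr()['Final_Time_Hund'].drop('Final_Time_Hund').abs().sort_values(ascending=False)",
"_____no_output_____"
]
],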
[
[
"sns.pairplot(horsey, size = 1.2, aspect=1.5);",
"_____no_output_____"
],
[
"plt.hist(horsey.Final_Time_Hund);",
"_____no_output_____"
]
],
[
[
"#### Linear Regression (All inputs)",
"_____no_output_____"
]
],
[
[
"#Gotta add the constant... without it my r^2 was 1.0!\nXlin = sm.add_constant(Xlin)\n#Creating the model\nlin_model = sm.OLS(ylin,Xlin)\n# Fitting the model to the training set\nfit_lin = lin_model.fit()\n# Print summary statistics of the model's performance\nfit_lin.summary()",
"_____no_output_____"
]
],
[
[
"- r2 could be worse...\n- adj r2 also could be worse...\n- Inputs that seem significant based on pvalue : Gender... that's about it! The other lowests are Firsts, seconds and date (though they're quite crappy). But I guess if 70% of data lies within the level of confidence... that's better than none...",
"_____no_output_____"
],
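[
"# Pull the p-values out of the fitted model programmatically (fit_lin is the statsmodels fit\n# behind the summary above) rather than reading them off the table.\nfit_lin.pvalues.sort_values()",
"_____no_output_____"
],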
[
"** TESTING! **",
"_____no_output_____"
]
],
[
[
"Xlin = X_ttrain\nylin = y_ttrain",
"_____no_output_____"
],
[
"lr_train = LinearRegression()\nlr_fit = lr_train.fit(Xlin, ylin)\n\nr2_training = lr_train.score(Xlin, ylin)\nr2adj_training = 1 - (1-r2_training)*(len(ylin)-1)/(len(ylin)-Xlin.shape[1]-1)\n\npreds = lr_fit.predict(X_ttest)\nrmse = np.sqrt(MSE(y_ttest, preds))\n\nprint('R2:', r2_training)\nprint('R2 Adjusted:', r2adj_training)\nprint('Output Predictions', preds)\nprint('RMSE:', rmse)\n",
"R2: 0.4319523569243877\nR2 Adjusted: 0.23607385931210767\nOutput Predictions [9571.51155428 9584.37729715 9569.09795451 9564.66413762 9454.58227211\n 9522.1853719 9566.59489312 9566.16649405 9568.44364943 9605.75899683\n 9654.15466177]\nRMSE: 134.49558155404648\n"
]
],
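[
[
"# Optional helper: the adjusted R^2 formula that is written out inline throughout this notebook,\n# collected in one place. The function name is just a convenience chosen here.\ndef adjusted_r2(r2, n_samples, n_features):\n    return 1 - (1 - r2)*(n_samples - 1)/(n_samples - n_features - 1)\n\nadjusted_r2(r2_training, len(ylin), Xlin.shape[1])",
"_____no_output_____"
]
],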
[
[
"#### Linear Regression (Updated Inputs)",
"_____no_output_____"
],
[
"Below is the best combination of features to drop: Thirds, ThreeF & PrecentWin",
"_____no_output_____"
]
],
[
[
"Xlin2 = Xlin.drop(labels ='Thirds', axis = 1).drop(labels ='ThreeF', axis = 1).drop(labels ='PercentWin', axis = 1)\nylin2 = y_ttrain",
"_____no_output_____"
],
[
"#Gotta add the constant... without it my r^2 was 1.0!\nXlin2 = sm.add_constant(Xlin2)\n\n#Creating the model\nlin_model = sm.OLS(ylin,Xlin2)\n\n# Fitting the model to the training set\nfit_lin = lin_model.fit()\n\n# Print summary statistics of the model's performance\nfit_lin.summary()",
"_____no_output_____"
]
],
[
[
"Slightly better...",
"_____no_output_____"
],
[
"## Model A - Elastic Net (no frills)",
"_____no_output_____"
]
],
[
[
"## Establishing x and y\n\nXA = X_ttrain\nyA = y_ttrain",
"_____no_output_____"
],
[
"#Checking the predictability of the model with this alpha = 1\nmodelA = ElasticNet(1, l1_ratio=.5)\nfit = modelA.fit(XA, yA)\nrsq = fit.score(XA, yA)\nadj_rsq = 1 - (1-rsq)*(len(yA)-1)/(len(yA)-XA.shape[1]-1)\n\n\nprint(rsq)\nprint(adj_rsq)",
"0.30733402796483356\n0.06848369278029331\n"
]
],
[
[
"** 0.3073 ** not great... but not terrible. 30% of the variance is explained by the model. ",
"_____no_output_____"
]
],
[
[
"#Let's see if I play around with the ratios of L1 and L2\n\nmodelA = ElasticNet(1, l1_ratio=.2)\nfit = modelA.fit(XA, yA)\nrsq = fit.score(XA, yA)\nadj_rsq = 1 - (1-rsq)*(len(yA)-1)/(len(yA)-XA.shape[1]-1)\n\nprint(rsq)\nprint(adj_rsq)",
"0.2773051493067136\n0.02810002837799408\n"
]
],
[
[
"** Looks slightly worse. I guess there wasn't much need to compress complexity, or fix colinearity. **",
"_____no_output_____"
]
],
[
[
"#Let's check it in the other direction, with L1 getting more weight.\n\nmodelA = ElasticNet(1, l1_ratio=.98)\nfit = modelA.fit(XA, yA)\nrsq = fit.score(XA, yA)\nadj_rsq = 1 - (1-rsq)*(len(yA)-1)/(len(yA)-XA.shape[1]-1)\n\nprint(rsq)\nprint(adj_rsq)\n",
"0.41903203286967117\n0.21869825110059227\n"
]
],
[
[
"** Seems like l1 of 0.98 really takes the cake! Let's check out alpha... Might be worth it to switch to a \nLasso model... something to keep in mind**",
"_____no_output_____"
]
],
[
[
"#Let's see if we can find a better alpha...\n\nkf = KFold(n_splits=5, shuffle = True, random_state = 40 )\n\nalphas = [1e-9,1e-8,1e-7,1e-6,1e-5,1e-4,1e-3,1e-2,1e-1,1,10,100,1000,10000, 100000, 1000000]\n#alphas = [0,.001,.01,.1,.2,.5,.9,1,5,10,50,100,1000,10000]\nerrors = []\nfor i in alphas:\n err_list = []\n for train_index, test_index in kf.split(XA):\n #print(\"TRAIN:\", train_index, \"TEST:\", test_index) #This gives the index of the rows you're training and testing. \n XA_train, XA_test = XA.loc[train_index], XA.loc[test_index]\n yA_train, yA_test = yA[train_index], yA[test_index]\n \n ef = ElasticNet(i, l1_ratio = 0.5)\n ef.fit(XA_train,yA_train)\n #print(ef.coef_) #This prints the coefficients of each of the input variables. \n preds = ef.predict(XA_test) #Predictions for the y value. \n error = np.sqrt(MSE(preds,yA_test))\n err_list.append(error)\n \n error = np.mean(err_list)\n errors.append(error)\n print(\"The RMSE for alpha = {0} is {1}\".format(i,error))",
"The RMSE for alpha = 1e-09 is 91.81676048874472\nThe RMSE for alpha = 1e-08 is 91.81675800679318\nThe RMSE for alpha = 1e-07 is 91.81673318723008\nThe RMSE for alpha = 1e-06 is 91.81648500108626\nThe RMSE for alpha = 1e-05 is 91.81400415413242\nThe RMSE for alpha = 0.0001 is 91.78929653852856\nThe RMSE for alpha = 0.001 is 91.55190229188295\nThe RMSE for alpha = 0.01 is 89.86247747686414\nThe RMSE for alpha = 0.1 is 86.18239990702807\nThe RMSE for alpha = 1 is 86.22685788137446\nThe RMSE for alpha = 10 is 85.58936962368537\nThe RMSE for alpha = 100 is 80.68484780710189\nThe RMSE for alpha = 1000 is 79.81833497094951\nThe RMSE for alpha = 10000 is 79.81833497094951\nThe RMSE for alpha = 100000 is 79.81833497094951\nThe RMSE for alpha = 1000000 is 79.81833497094951\n"
]
],
[
[
"** Looks like the best alpha is around 1000! Lets see if we can get even more granular. **",
"_____no_output_____"
]
],
[
[
"kf = KFold(n_splits=5, shuffle = True, random_state = 40)\n\nalphas = [500, 600, 800, 900, 1000, 1500, 2000, 3000]\n#alphas = [0,.001,.01,.1,.2,.5,.9,1,5,10,50,100,1000,10000]\nerrors = []\nfor i in alphas:\n err_list = []\n for train_index, test_index in kf.split(XA):\n #print(\"TRAIN:\", train_index, \"TEST:\", test_index) #This gives the index of the rows you're training and testing. \n XA_train, XA_test = XA.loc[train_index], XA.loc[test_index]\n yA_train, yA_test = yA[train_index], yA[test_index]\n \n ef = ElasticNet(i)\n ef.fit(XA_train,yA_train)\n #print(ef.coef_) #This prints the coefficients of each of the input variables. \n preds = ef.predict(XA_test) #Predictions for the y value. \n error = np.sqrt(MSE(preds,yA_test))\n err_list.append(error)\n \n error = np.mean(err_list)\n errors.append(error)\n print(\"The RMSE for alpha = {0} is {1}\".format(i,error))",
"The RMSE for alpha = 500 is 79.93218785178335\nThe RMSE for alpha = 600 is 79.86798894741898\nThe RMSE for alpha = 800 is 79.81833497094951\nThe RMSE for alpha = 900 is 79.81833497094951\nThe RMSE for alpha = 1000 is 79.81833497094951\nThe RMSE for alpha = 1500 is 79.81833497094951\nThe RMSE for alpha = 2000 is 79.81833497094951\nThe RMSE for alpha = 3000 is 79.81833497094951\n"
]
],
[
[
"** I'm going to settle on an alpha of 800 **",
"_____no_output_____"
]
],
[
[
"#Checking the predictability of the model again with the new alpha of 90. \nmodelA = ElasticNet(alpha = 800)\nfit = modelA.fit(XA, yA)\nfit.score(XA, yA)",
"_____no_output_____"
]
],
[
[
"Hm. Not really sure what that did, but definitely didn't work...",
"_____no_output_____"
],
[
"** TESTING **",
"_____no_output_____"
],
[
"Doing ElasticNetCV (withouth any modifications)",
"_____no_output_____"
]
],
[
[
"## Letting it do it's thing on it's own.\n\nencvA = ElasticNetCV()\nfitA = encvA.fit(XA, yA)\n\nr2_training = encvA.score(XA, yA)\ny= np.trim_zeros(encvA.fit(XA,yA).coef_)\n#r2adj_training = 1 - (1-r2_training)*(XA.shape[1]-1)/(XA.shape[1]-len(y)-1)\nadj_rsq = 1 - (1-r2_training)*(len(XA)-1)/(len(XA)-XA.shape[1]-len(y)-1)\n\npreds = fitA.predict(X_ttest)\nrmse = np.sqrt(MSE(preds, y_ttest))\n\nprint('R2:', r2_training)\nprint('R2 Adjusted:', adj_rsq)\nprint('Output Predictions', preds)\nprint('RMSE:', rmse)\nprint('Alpha:',encvA.alpha_)\nprint('L1:',encvA.l1_ratio_)\nprint('Coefficients:',fitA.coef_)\n\n",
"R2: 0.1250239164325775\nR2 Adjusted: -0.7960035399541829\nOutput Predictions [9568.44703874 9544.25401996 9569.26138184 9558.15080173 9500.44511137\n 9506.81075853 9578.79823457 9536.96686145 9508.45334023 9520.20310548\n 9541.95653301]\nRMSE: 146.62930213115092\nAlpha: 19.39130187288461\nL1: 0.5\nCoefficients: [ 0.14406805 -1.53266832 -1.23154879 -0.34943161 -0. -0.\n -1.09796857 -0.0641983 1.12444581 1.63415956]\n"
],
[
"elastic_coef = encvA.fit(XA, yA).coef_\n_ = plt.bar(range(len(XA.columns)), elastic_coef)\n_ = plt.xticks(range(len(XA.columns)), XA.columns, rotation=45)\n_ = plt.ylabel('Coefficients')\nplt.show()",
"_____no_output_____"
]
],
[
[
"Doing ElasticNet CV - changing the l1 ratio",
"_____no_output_____"
]
],
[
[
"encvA2 = ElasticNetCV(l1_ratio = .99)\nfitA2 = encvA2.fit(XA, yA)\n\nr2_training = encvA2.score(XA, yA)\ny= np.trim_zeros(encvA2.fit(XA,yA).coef_)\nadj_rsq = 1 - (1-r2_training)*(len(XA)-1)/(len(XA)-XA.shape[1]-len(y)-1)\n\npreds = fitA2.predict(X_ttest)\nrmse = np.sqrt(MSE(y_ttest, preds))\n\nprint('R2:', r2_training)\nprint('R2 Adjusted:', adj_rsq)\nprint('Output Predictions', preds)\nprint('RMSE:', rmse)\nprint('Alpha:',encvA2.alpha_)\nprint('L1:',encvA2.l1_ratio_)\nprint('Coefficients:',fitA.coef_)",
"R2: 0.13490409373110013\nR2 Adjusted: -0.686937017224355\nOutput Predictions [9569.73379751 9544.83798636 9569.55519996 9559.76238991 9499.72243453\n 9505.65263375 9580.09286092 9538.58865853 9508.72077406 9524.05346858\n 9544.148973 ]\nRMSE: 145.8223278150644\nAlpha: 22.624455747388357\nL1: 0.99\nCoefficients: [ 0.14406805 -1.53266832 -1.23154879 -0.34943161 -0. -0.\n -1.09796857 -0.0641983 1.12444581 1.63415956]\n"
],
[
"elastic_coef = encvA2.fit(XA, yA).coef_\n_ = plt.bar(range(len(XA.columns)), elastic_coef)\n_ = plt.xticks(range(len(XA.columns)), XA.columns, rotation=45)\n_ = plt.ylabel('Coefficients')\nplt.show()",
"_____no_output_____"
]
],
[
[
"### Extras",
"_____no_output_____"
]
],
[
[
"## L1 is 0.98\n\nencvA2 = ElasticNetCV(l1_ratio = 0.98)\nfitA2 = encvA2.fit(XA_train, yA_train)\n\n\nrsq = fitA2.score(XA_test, yA_test)\nadj_rsq = 1 - (1-rsq)*(len(yA)-1)/(len(yA)-XA.shape[1]-1)\n\npreds = fitA2.predict(XA_test)\nmserror = np.sqrt(MSE(preds,yA_test))\nprint(rsq)\nprint(adj_rsq)\nprint(preds)\nprint(mserror)\nprint(encvA2.alpha_)\nprint(encvA2.l1_ratio_)\n\n",
"-0.7699761779104741\n-1.3803127909830515\n[9530.22201358 9557.78660356 9568.12085351 9515.09437083 9408.69257225\n 9539.52109345 9552.02503325 9592.0898741 ]\n106.13663706955026\n13.708083498640713\n0.98\n"
]
],
[
[
"Still weird... ",
"_____no_output_____"
]
],
[
[
"## Trying some alphas... \n\nencvA3 = ElasticNetCV(alphas = [80,800,1000])\nfitA3 = encvA3.fit(XA_train, yA_train)\n\n\nrsq = fitA3.score(XA_test, yA_test)\nadj_rsq = 1 - (1-rsq)*(len(yA)-1)/(len(yA)-XA.shape[1]-1)\n\npreds = fitA3.predict(XA_test)\nmserror = np.sqrt(MSE(preds,yA_test))\nprint(rsq)\nprint(adj_rsq)\nprint(preds)\nprint(mserror)\nprint(encvA3.alpha_)\nprint(encvA3.l1_ratio_)",
"-0.03034522766804737\n-0.3856366854846154\n[9531.84786531 9537.12219259 9555.26771165 9537.26522148 9522.40946291\n 9540.5271364 9528.01989905 9550.6925083 ]\n80.97910904887202\n80\n0.5\n"
]
],
[
[
"Still confused... ",
"_____no_output_____"
],
[
"## Model B - Elastic Net (polynomial transformation)",
"_____no_output_____"
]
],
[
[
"## Establishing x and y\n\nXB = X_ttrain\nyB = y_ttrain",
"_____no_output_____"
],
[
"ModelB = make_pipeline(PolynomialFeatures(2), LinearRegression())\nfit = ModelB.fit(XB, yB)\nrsq = fit.score(XB, yB)\nadj_rsq = 1 - (1-rsq)*(len(yA)-1)/(len(yB)-XB.shape[1]-1)\n\nprint(rsq)\nprint(adj_rsq)\n",
"1.0\n1.0\n"
],
[
"ModelB = make_pipeline(PolynomialFeatures(3), ElasticNetCV(l1_ratio = .5))\nfit = ModelB.fit(XB, yB)\nrsq = fit.score(XB, yB)\nadj_rsq = 1 - (1-rsq)*(len(yA)-1)/(len(yB)-XB.shape[1]-1)\n\nprint(rsq)\nprint(adj_rsq)\n\n",
"/Users/vicky/anaconda3/lib/python3.6/site-packages/sklearn/linear_model/coordinate_descent.py:491: ConvergenceWarning: Objective did not converge. You might want to increase the number of iterations. Fitting data with very small alpha may cause precision problems.\n ConvergenceWarning)\n"
]
],
[
[
"... Hm ... Not great. But we'll test it anyway.",
"_____no_output_____"
],
[
"** TESTING **",
"_____no_output_____"
]
],
[
[
"encvB = make_pipeline(PolynomialFeatures(2), LinearRegression())\nfitB = encvB.fit(XB, yB)\n\n\nr2_training = encvB.score(X_ttest, y_ttest)\n#y= np.trim_zeros(encvB.fit(XB,yB).coef_)\n#r2adj_training = 1 - (1-r2_training)*(XB.shape[1]-1)/(XB.shape[1]-len(y)-1)\n\n\npreds = fitB.predict(X_ttest)\nrmse = np.sqrt(MSE(y_ttest, preds))\n\nprint('R2:', r2_training)\nprint('R2 Adjusted:', r2adj_training)\nprint('Output Predictions', preds)\nprint('RMSE:', rmse)\nprint('Alpha:',encvB_steps.elasticnetcv.alpha_)\nprint('L1:',encvB.named_steps.elasticnetcv.l1_ratio_)\n",
"R2: -6.44286573439161\nR2 Adjusted: 8.874784752106773\nOutput Predictions [ 9603.93575421 9626.20719928 9543.74011595 9648.26260264\n 9876.89530352 9057.03108162 9049.60166209 10380.37207319\n 9489.22416851 9204.46604812 9389.82733389]\nRMSE: 400.1589267503806\n"
],
[
"#Testing the predictability of the model with this alpha = 0.5\nXB_train, XB_test, yB_train, yB_test = train_test_split(XB, yB, test_size=0.2)",
"_____no_output_____"
],
[
"modelB = make_pipeline(PolynomialFeatures(2), ElasticNetCV(l1_ratio = .5))\nmodelB.fit(XB_train, yB_train)\nrsq = modelB.score(XB_train,yB_train)\nadj_rsq = 1 - (1-rsq)*(len(yB_train)-1)/(len(yB_train)-XB_train.shape[1]-1)\n\npreds = fitA3.predict(XB_test)\nmserror = np.sqrt(MSE(preds,yB_test))\nprint(rsq)\nprint(adj_rsq)\nprint(preds)\nprint(mserror)\nprint(modelB.named_steps.elasticnetcv.alpha_)\nprint(modelB.named_steps.elasticnetcv.l1_ratio_)",
"0.24223454866587957\n-0.11860614244560641\n[9520.38536911 9535.49478345 9538.04112195 9537.26522148 9535.89628111\n 9531.84786531 9534.06521828 9504.76352166]\n75.95291917851185\n36914.51228303739\n0.5\n"
]
],
[
[
"## Model C - Elastic Net CV with transformations",
"_____no_output_____"
],
[
"On second review, none of the inputs would benefit from transformations",
"_____no_output_____"
]
],
[
[
"C_train = ttrain",
"_____no_output_____"
],
[
"C_train['new_firsts_log']=np.log(C_train.Firsts)\n",
"/Users/vicky/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:1: RuntimeWarning: divide by zero encountered in log\n \"\"\"Entry point for launching an IPython kernel.\n"
],
[
"C_train\n#C_train.new_firsts_log.str.replace('-inf', '0')",
"_____no_output_____"
]
],
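[
[
"# One possible fix for the -inf values above: np.log1p computes log(1 + x), so horses with zero\n# Firsts map to 0 instead of -inf and the divide-by-zero warning goes away\n# (the new column name is just chosen here for comparison).\nC_train['new_firsts_log1p'] = np.log1p(C_train.Firsts)\nC_train[['Firsts','new_firsts_log','new_firsts_log1p']].head()",
"_____no_output_____"
]
],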
[
[
"## Predicting Today's Race!",
"_____no_output_____"
]
],
[
[
"todays_race = pd.read_csv('big_race_day').drop('Unnamed: 0', axis = 1).drop('Horse Name', axis =1)\n## today_race acting as testing x",
"_____no_output_____"
],
[
"todays_race",
"_____no_output_____"
]
],
[
[
"### Maiden Fems Prediction",
"_____no_output_____"
]
],
[
[
"ym_train = MaidenFems['Final_Time_Hund'] \nxm_train = MaidenFems.drop('Final_Time_Hund',1).drop('Horse Name',1).drop('Final_Time',1)",
"_____no_output_____"
],
[
"enMaid = ElasticNetCV(.90)\nfitMaid = enMaid.fit(xm_train, ym_train)\n\npreds = fitMaid.predict(todays_race)\n\nr2_training = enMaid.score(xm_train, ym_train)\ny= np.trim_zeros(enMaid.fit(xm_train,ym_train).coef_)\nadj_rsq = 1 - (1-r2_training)*(len(xm_train)-1)/(len(xm_train)-xm_train.shape[1]-len(y)-1)\n\n\nprint('Output Predictions', preds)\nprint('R2:', r2_training)\nprint('R2 Adjusted:', adj_rsq)\nprint('Alpha:',enMaid.alpha_)\nprint('L1:',enMaid.l1_ratio_)\nprint('Coefficients:',fitMaid.coef_)\n\n",
"Output Predictions [10116.53721999 10097.09521978 10063.73725849 10069.30996726\n 10055.90957387 10063.90529607 10073.93665433 10072.45966259\n 10092.43120946]\nR2: 0.5590130886374652\nR2 Adjusted: 1.4409869113625347\nAlpha: 131.68320065913423\nL1: 0.9\nCoefficients: [ 0. 0. -0. -0. 0. -0.\n 0. 0.09310739 2.16186848 0. ]\n"
],
[
"elastic_coef = enMaid.fit(xm_train, ym_train).coef_\n_ = plt.bar(range(len(xm_train.columns)), elastic_coef)\n_ = plt.xticks(range(len(xm_train.columns)), xm_train.columns, rotation=45)\n_ = plt.ylabel('Coefficients')\nplt.show()",
"_____no_output_____"
],
[
"finalguesses_Maiden = [{'Horse Name': 'Lady Lemon Drop' ,'Maiden Horse Guess': 10116.53721999},\n {'Horse Name': 'Curlins Prize' ,'Maiden Horse Guess': 10097.09521978},\n {'Horse Name': 'Luminoso' ,'Maiden Horse Guess':10063.11500294},\n {'Horse Name': 'Party Dancer' ,'Maiden Horse Guess': 10069.32339855},\n {'Horse Name': 'Bring on the Band' ,'Maiden Horse Guess': 10054.64900894},\n {'Horse Name': 'Rockin Ready' ,'Maiden Horse Guess': 10063.67940254},\n {'Horse Name': 'Rattle' ,'Maiden Horse Guess': 10073.93665433},\n {'Horse Name': 'Curlins Journey' ,'Maiden Horse Guess': 10072.45966259},\n {'Horse Name': 'Heaven Escape' ,'Maiden Horse Guess':10092.43120946}]",
"_____no_output_____"
]
],
[
[
"### EN-CV prediction",
"_____no_output_____"
]
],
[
[
"encvL = ElasticNetCV(l1_ratio = 0.99)\nfiten = encvL.fit(X_ttrain, y_ttrain)\n\npreds = fiten.predict(todays_race)\n\nr2_training = encvL.score(X_ttrain, y_ttrain)\ny = np.trim_zeros(encvL.fit(X_ttrain,y_ttrain).coef_)\nadj_rsq = 1 - (1-r2_training)*(len(X_ttrain)-1)/(len(X_ttrain)-X_ttrain.shape[1]-len(y)-1)\n\n\nprint('Output Predictions', preds)\nprint('R2:', r2_training)\nprint('R2 Adjusted:', adj_rsq)\nprint('Alpha:',encv.alpha_)\nprint('L1:',encv.l1_ratio_)\nprint('Coefficients:',fiten.coef_)\n\n",
"Output Predictions [9609.70585871 9645.82659915 9558.93257549 9564.01963654 9577.9212198\n 9556.46879067 9549.09508205 9546.58621572 9586.917829 ]\nR2: 0.13490409373110013\nR2 Adjusted: -0.686937017224355\nAlpha: 22.624455747388357\nL1: 0.99\nCoefficients: [ 0. -3.3306084 -1.00376514 -0. -0. -0.\n -1.1288019 -0. 1.04027195 1.72458346]\n"
],
[
"elastic_coef = encvL.fit(X_ttrain, y_ttrain).coef_\n_ = plt.bar(range(len(X_ttrain.columns)), elastic_coef)\n_ = plt.xticks(range(len(X_ttrain.columns)), X_ttrain.columns, rotation=45)\n_ = plt.ylabel('Coefficients')\nplt.show()",
"_____no_output_____"
],
[
"finalguesses_EN = [{'Horse Name': 'Lady Lemon Drop' ,'Guess': 9609.70585871},\n {'Horse Name': 'Curlins Prize' ,'Guess': 9645.82659915},\n {'Horse Name': 'Luminoso' ,'Guess':9558.93257549},\n {'Horse Name': 'Party Dancer' ,'Guess': 9564.01963654},\n {'Horse Name': 'Bring on the Band' ,'Guess': 9577.9212198},\n {'Horse Name': 'Rockin Ready' ,'Guess': 9556.46879067},\n {'Horse Name': 'Rattle' ,'Guess': 9549.09508205},\n {'Horse Name': 'Curlins Journey' ,'Guess': 9546.58621572},\n {'Horse Name': 'Heaven Escape' ,'Guess':9586.917829}]",
"_____no_output_____"
]
],
[
[
"### Linear Regression prediction",
"_____no_output_____"
]
],
[
[
"Xlin = X_ttrain\nylin = y_ttrain",
"_____no_output_____"
],
[
"lr = LinearRegression()\nlrfit = lr.fit(Xlin, ylin)\n\npreds = lrfit.predict(todays_race)\n\nr2_training = lr.score(Xlin, ylin)\nr2adj_training = 1 - (1-r2_training)*(len(ylin)-1)/(len(ylin)-Xlin.shape[1]-1)\n\n\nprint('Output Predictions', preds)\nprint('R2:', r2_training)\nprint('R2 Adjusted:', r2adj_training)\n\n",
"Output Predictions [9720.65585682 9746.17852003 9608.10444379 9633.58532183 9621.04698335\n 9561.82026773 9644.13062968 9666.24092249 9700.56665335]\nR2: 0.4319523569243877\nR2 Adjusted: 0.23607385931210767\n"
],
[
"elastic_coef = lrfit.fit(Xlin, ylin).coef_\n_ = plt.bar(range(len(Xlin.columns)), elastic_coef)\n_ = plt.xticks(range(len(Xlin.columns)), Xlin.columns, rotation=45)\n_ = plt.ylabel('Coefficients')\nplt.show()",
"_____no_output_____"
],
[
"finalguesses_Lin = [{'Horse Name': 'Lady Lemon Drop' ,'Guess': 9720.65585682},\n {'Horse Name': 'Curlins Prize' ,'Guess': 9746.17852003},\n {'Horse Name': 'Luminoso' ,'Guess':9608.10444379},\n {'Horse Name': 'Party Dancer' ,'Guess': 9633.58532183},\n {'Horse Name': 'Bring on the Band' ,'Guess': 9621.04698335},\n {'Horse Name': 'Rockin Ready' ,'Guess': 9561.82026773},\n {'Horse Name': 'Rattle' ,'Guess': 9644.13062968},\n {'Horse Name': 'Curlins Journey' ,'Guess': 9666.24092249},\n {'Horse Name': 'Heaven Escape' ,'Guess':9700.56665335}]",
"_____no_output_____"
]
],
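[
[
"# An alternative to typing the predictions into the dictionaries by hand: build the frame\n# directly from lrfit.predict. This assumes the horse names listed here are in the same row\n# order as todays_race.\nhorse_names = ['Lady Lemon Drop','Curlins Prize','Luminoso','Party Dancer','Bring on the Band',\n               'Rockin Ready','Rattle','Curlins Journey','Heaven Escape']\npd.DataFrame({'Horse Name': horse_names, 'Guess': lrfit.predict(todays_race)}).sort_values('Guess')",
"_____no_output_____"
]
],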
[
[
"### Setting the data frames",
"_____no_output_____"
]
],
[
[
"GuessLin = pd.DataFrame(finalguesses_Lin)\nGuessMaid = pd.DataFrame(finalguesses_Maiden)\nGuessEN = pd.DataFrame(finalguesses_EN)",
"_____no_output_____"
],
[
"GuessLin.sort_values('Guess')",
"_____no_output_____"
],
[
"GuessMaid.sort_values('Maiden Horse Guess')",
"_____no_output_____"
],
[
"GuessEN.sort_values('Guess')",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
]
] |
4a6606ca82aecdfb295668f73df99c5853118fbe
| 190,393 |
ipynb
|
Jupyter Notebook
|
session3/assign_3_submission.ipynb
|
gantir/eip4
|
4be305025d5c00511bec8a5881fc91825c541209
|
[
"MIT"
] | null | null | null |
session3/assign_3_submission.ipynb
|
gantir/eip4
|
4be305025d5c00511bec8a5881fc91825c541209
|
[
"MIT"
] | null | null | null |
session3/assign_3_submission.ipynb
|
gantir/eip4
|
4be305025d5c00511bec8a5881fc91825c541209
|
[
"MIT"
] | null | null | null | 190.583584 | 53,394 | 0.828145 |
[
[
[
"%tensorflow_version 1.x \n\n#Suppress warnings which keep poping up\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nfrom keras import backend as K\nimport time\nimport matplotlib.pyplot as plt\nimport numpy as np\n% matplotlib inline\nnp.random.seed(2017) \nfrom keras.models import Sequential\nfrom keras.layers.convolutional import Convolution2D, MaxPooling2D, SeparableConvolution2D\nfrom keras.layers import Activation, Flatten, Dense, Dropout, AveragePooling2D\nfrom keras.layers.normalization import BatchNormalization\nfrom keras.utils import np_utils",
"Using TensorFlow backend.\n"
],
[
"from keras.datasets import cifar10\n(train_features, train_labels), (test_features, test_labels) = cifar10.load_data()\nnum_train, img_channels, img_rows, img_cols = train_features.shape\nnum_test, _, _, _ = test_features.shape\nnum_classes = len(np.unique(train_labels))",
"Downloading data from https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz\n170500096/170498071 [==============================] - 11s 0us/step\n"
],
[
"print(train_features.shape)",
"(50000, 32, 32, 3)\n"
],
[
"class_names = ['airplane','automobile','bird','cat','deer',\n 'dog','frog','horse','ship','truck']\nfig = plt.figure(figsize=(8,3))\nfor i in range(num_classes):\n ax = fig.add_subplot(2, 5, 1 + i, xticks=[], yticks=[])\n idx = np.where(train_labels[:]==i)[0]\n features_idx = train_features[idx,::]\n img_num = np.random.randint(features_idx.shape[0])\n im = features_idx[img_num]\n ax.set_title(class_names[i])\n plt.imshow(im)\nplt.show()",
"_____no_output_____"
],
[
"def plot_model_history(model_history):\n fig, axs = plt.subplots(1,2,figsize=(15,5))\n # summarize history for accuracy\n axs[0].plot(range(1,len(model_history.history['acc'])+1),model_history.history['acc'])\n axs[0].plot(range(1,len(model_history.history['val_acc'])+1),model_history.history['val_acc'])\n axs[0].set_title('Model Accuracy')\n axs[0].set_ylabel('Accuracy')\n axs[0].set_xlabel('Epoch')\n axs[0].set_xticks(np.arange(1,len(model_history.history['acc'])+1),len(model_history.history['acc'])/10)\n axs[0].legend(['train', 'val'], loc='best')\n # summarize history for loss\n axs[1].plot(range(1,len(model_history.history['loss'])+1),model_history.history['loss'])\n axs[1].plot(range(1,len(model_history.history['val_loss'])+1),model_history.history['val_loss'])\n axs[1].set_title('Model Loss')\n axs[1].set_ylabel('Loss')\n axs[1].set_xlabel('Epoch')\n axs[1].set_xticks(np.arange(1,len(model_history.history['loss'])+1),len(model_history.history['loss'])/10)\n axs[1].legend(['train', 'val'], loc='best')\n plt.show()",
"_____no_output_____"
],
[
"def accuracy(test_x, test_y, model):\n result = model.predict(test_x)\n predicted_class = np.argmax(result, axis=1)\n true_class = np.argmax(test_y, axis=1)\n num_correct = np.sum(predicted_class == true_class) \n accuracy = float(num_correct)/result.shape[0]\n return (accuracy * 100)",
"_____no_output_____"
],
[
"train_features = train_features.astype('float32')/255\ntest_features = test_features.astype('float32')/255\n# convert class labels to binary class labels\ntrain_labels = np_utils.to_categorical(train_labels, num_classes)\ntest_labels = np_utils.to_categorical(test_labels, num_classes)",
"_____no_output_____"
],
[
"# Define the model\nmodel = Sequential()\nmodel.add(Convolution2D(48, 3, 3, border_mode='same', input_shape=(32, 32, 3)))\nmodel.add(Activation('relu'))\nmodel.add(Convolution2D(48, 3, 3))\nmodel.add(Activation('relu'))\nmodel.add(MaxPooling2D(pool_size=(2, 2)))\nmodel.add(Dropout(0.25))\nmodel.add(Convolution2D(96, 3, 3, border_mode='same'))\nmodel.add(Activation('relu'))\nmodel.add(Convolution2D(96, 3, 3))\nmodel.add(Activation('relu'))\nmodel.add(MaxPooling2D(pool_size=(2, 2)))\nmodel.add(Dropout(0.25))\nmodel.add(Convolution2D(192, 3, 3, border_mode='same'))\nmodel.add(Activation('relu'))\nmodel.add(Convolution2D(192, 3, 3))\nmodel.add(Activation('relu'))\nmodel.add(MaxPooling2D(pool_size=(2, 2)))\nmodel.add(Dropout(0.25))\nmodel.add(Flatten())\nmodel.add(Dense(512))\nmodel.add(Activation('relu'))\nmodel.add(Dropout(0.5))\nmodel.add(Dense(256))\nmodel.add(Activation('relu'))\nmodel.add(Dropout(0.5))\nmodel.add(Dense(num_classes, activation='softmax'))\n# Compile the model\nmodel.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])\n",
"WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:66: The name tf.get_default_graph is deprecated. Please use tf.compat.v1.get_default_graph instead.\n\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:541: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:4432: The name tf.random_uniform is deprecated. Please use tf.random.uniform instead.\n\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:4267: The name tf.nn.max_pool is deprecated. Please use tf.nn.max_pool2d instead.\n\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:148: The name tf.placeholder_with_default is deprecated. Please use tf.compat.v1.placeholder_with_default instead.\n\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:3733: calling dropout (from tensorflow.python.ops.nn_ops) with keep_prob is deprecated and will be removed in a future version.\nInstructions for updating:\nPlease use `rate` instead of `keep_prob`. Rate should be set to `rate = 1 - keep_prob`.\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/optimizers.py:793: The name tf.train.Optimizer is deprecated. Please use tf.compat.v1.train.Optimizer instead.\n\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:3576: The name tf.log is deprecated. Please use tf.math.log instead.\n\n"
],
[
"model.summary()",
"Model: \"sequential_1\"\n_________________________________________________________________\nLayer (type) Output Shape Param # \n=================================================================\nconv2d_1 (Conv2D) (None, 32, 32, 48) 1344 \n_________________________________________________________________\nactivation_1 (Activation) (None, 32, 32, 48) 0 \n_________________________________________________________________\nconv2d_2 (Conv2D) (None, 30, 30, 48) 20784 \n_________________________________________________________________\nactivation_2 (Activation) (None, 30, 30, 48) 0 \n_________________________________________________________________\nmax_pooling2d_1 (MaxPooling2 (None, 15, 15, 48) 0 \n_________________________________________________________________\ndropout_1 (Dropout) (None, 15, 15, 48) 0 \n_________________________________________________________________\nconv2d_3 (Conv2D) (None, 15, 15, 96) 41568 \n_________________________________________________________________\nactivation_3 (Activation) (None, 15, 15, 96) 0 \n_________________________________________________________________\nconv2d_4 (Conv2D) (None, 13, 13, 96) 83040 \n_________________________________________________________________\nactivation_4 (Activation) (None, 13, 13, 96) 0 \n_________________________________________________________________\nmax_pooling2d_2 (MaxPooling2 (None, 6, 6, 96) 0 \n_________________________________________________________________\ndropout_2 (Dropout) (None, 6, 6, 96) 0 \n_________________________________________________________________\nconv2d_5 (Conv2D) (None, 6, 6, 192) 166080 \n_________________________________________________________________\nactivation_5 (Activation) (None, 6, 6, 192) 0 \n_________________________________________________________________\nconv2d_6 (Conv2D) (None, 4, 4, 192) 331968 \n_________________________________________________________________\nactivation_6 (Activation) (None, 4, 4, 192) 0 \n_________________________________________________________________\nmax_pooling2d_3 (MaxPooling2 (None, 2, 2, 192) 0 \n_________________________________________________________________\ndropout_3 (Dropout) (None, 2, 2, 192) 0 \n_________________________________________________________________\nflatten_1 (Flatten) (None, 768) 0 \n_________________________________________________________________\ndense_1 (Dense) (None, 512) 393728 \n_________________________________________________________________\nactivation_7 (Activation) (None, 512) 0 \n_________________________________________________________________\ndropout_4 (Dropout) (None, 512) 0 \n_________________________________________________________________\ndense_2 (Dense) (None, 256) 131328 \n_________________________________________________________________\nactivation_8 (Activation) (None, 256) 0 \n_________________________________________________________________\ndropout_5 (Dropout) (None, 256) 0 \n_________________________________________________________________\ndense_3 (Dense) (None, 10) 2570 \n=================================================================\nTotal params: 1,172,410\nTrainable params: 1,172,410\nNon-trainable params: 0\n_________________________________________________________________\n"
],
[
"from keras.preprocessing.image import ImageDataGenerator\n\ndatagen = ImageDataGenerator(zoom_range=0.0, \n rotation_range=15,\n width_shift_range=0.1,\n height_shift_range=0.1,\n horizontal_flip=True)",
"_____no_output_____"
],
[
"# train the model\nstart = time.time()\n# Train the model\nmodel_info = model.fit_generator(datagen.flow(train_features, train_labels, batch_size = 128),\n samples_per_epoch = train_features.shape[0], nb_epoch = 50, \n validation_data = (test_features, test_labels), verbose=1)\nend = time.time()\nprint (\"Model took %0.2f seconds to train\"%(end - start))\n# plot model history\nplot_model_history(model_info)\n# compute test accuracy\nprint (\"Accuracy on test data is: %0.2f\"%accuracy(test_features, test_labels, model))",
"Epoch 1/50\n390/390 [==============================] - 21s 54ms/step - loss: 1.9240 - acc: 0.2516 - val_loss: 1.5537 - val_acc: 0.4113\nEpoch 2/50\n390/390 [==============================] - 20s 51ms/step - loss: 1.4521 - acc: 0.4669 - val_loss: 1.3400 - val_acc: 0.5068\nEpoch 3/50\n390/390 [==============================] - 20s 52ms/step - loss: 1.2348 - acc: 0.5587 - val_loss: 1.0685 - val_acc: 0.6183\nEpoch 4/50\n390/390 [==============================] - 20s 51ms/step - loss: 1.0859 - acc: 0.6166 - val_loss: 0.9279 - val_acc: 0.6732\nEpoch 5/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.9807 - acc: 0.6567 - val_loss: 0.8517 - val_acc: 0.7026\nEpoch 6/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.8949 - acc: 0.6907 - val_loss: 0.7744 - val_acc: 0.7336\nEpoch 7/50\n390/390 [==============================] - 20s 51ms/step - loss: 0.8364 - acc: 0.7121 - val_loss: 0.8505 - val_acc: 0.7107\nEpoch 8/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.7937 - acc: 0.7275 - val_loss: 0.7304 - val_acc: 0.7481\nEpoch 9/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.7485 - acc: 0.7432 - val_loss: 0.6922 - val_acc: 0.7575\nEpoch 10/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.7169 - acc: 0.7546 - val_loss: 0.6789 - val_acc: 0.7721\nEpoch 11/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.6942 - acc: 0.7637 - val_loss: 0.6435 - val_acc: 0.7737\nEpoch 12/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.6682 - acc: 0.7721 - val_loss: 0.6479 - val_acc: 0.7780\nEpoch 13/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.6527 - acc: 0.7779 - val_loss: 0.6381 - val_acc: 0.7814\nEpoch 14/50\n390/390 [==============================] - 20s 51ms/step - loss: 0.6359 - acc: 0.7823 - val_loss: 0.6547 - val_acc: 0.7754\nEpoch 15/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.6158 - acc: 0.7894 - val_loss: 0.6626 - val_acc: 0.7787\nEpoch 16/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.6031 - acc: 0.7929 - val_loss: 0.6309 - val_acc: 0.7917\nEpoch 17/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.5840 - acc: 0.8032 - val_loss: 0.6417 - val_acc: 0.7845\nEpoch 18/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.5699 - acc: 0.8056 - val_loss: 0.6014 - val_acc: 0.7945\nEpoch 19/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.5577 - acc: 0.8096 - val_loss: 0.6371 - val_acc: 0.7903\nEpoch 20/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.5560 - acc: 0.8095 - val_loss: 0.5972 - val_acc: 0.7980\nEpoch 21/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.5383 - acc: 0.8191 - val_loss: 0.5851 - val_acc: 0.7994\nEpoch 22/50\n390/390 [==============================] - 20s 51ms/step - loss: 0.5347 - acc: 0.8175 - val_loss: 0.5900 - val_acc: 0.8026\nEpoch 23/50\n390/390 [==============================] - 20s 51ms/step - loss: 0.5215 - acc: 0.8228 - val_loss: 0.5930 - val_acc: 0.7999\nEpoch 24/50\n390/390 [==============================] - 20s 51ms/step - loss: 0.5124 - acc: 0.8264 - val_loss: 0.5796 - val_acc: 0.8022\nEpoch 25/50\n390/390 [==============================] - 20s 51ms/step - loss: 0.5192 - acc: 0.8248 - val_loss: 0.5859 - val_acc: 0.8066\nEpoch 26/50\n390/390 [==============================] - 21s 53ms/step - loss: 0.5001 - acc: 0.8280 - val_loss: 0.6010 - val_acc: 
0.8034\nEpoch 27/50\n390/390 [==============================] - 21s 53ms/step - loss: 0.5007 - acc: 0.8303 - val_loss: 0.6054 - val_acc: 0.8039\nEpoch 28/50\n390/390 [==============================] - 21s 53ms/step - loss: 0.4885 - acc: 0.8331 - val_loss: 0.5698 - val_acc: 0.8086\nEpoch 29/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.4752 - acc: 0.8377 - val_loss: 0.5622 - val_acc: 0.8141\nEpoch 30/50\n390/390 [==============================] - 21s 53ms/step - loss: 0.4723 - acc: 0.8394 - val_loss: 0.5789 - val_acc: 0.8144\nEpoch 31/50\n390/390 [==============================] - 20s 53ms/step - loss: 0.4576 - acc: 0.8430 - val_loss: 0.5675 - val_acc: 0.8161\nEpoch 32/50\n390/390 [==============================] - 21s 53ms/step - loss: 0.4673 - acc: 0.8427 - val_loss: 0.5968 - val_acc: 0.8051\nEpoch 33/50\n390/390 [==============================] - 21s 53ms/step - loss: 0.4601 - acc: 0.8435 - val_loss: 0.5987 - val_acc: 0.8089\nEpoch 34/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.4453 - acc: 0.8486 - val_loss: 0.5813 - val_acc: 0.8075\nEpoch 35/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.4460 - acc: 0.8476 - val_loss: 0.5837 - val_acc: 0.8117\nEpoch 36/50\n390/390 [==============================] - 21s 53ms/step - loss: 0.4416 - acc: 0.8505 - val_loss: 0.5822 - val_acc: 0.8103\nEpoch 37/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.4390 - acc: 0.8516 - val_loss: 0.5441 - val_acc: 0.8199\nEpoch 38/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.4308 - acc: 0.8544 - val_loss: 0.6087 - val_acc: 0.8055\nEpoch 39/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.4179 - acc: 0.8590 - val_loss: 0.5597 - val_acc: 0.8200\nEpoch 40/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.4261 - acc: 0.8561 - val_loss: 0.5471 - val_acc: 0.8239\nEpoch 41/50\n390/390 [==============================] - 21s 53ms/step - loss: 0.4226 - acc: 0.8566 - val_loss: 0.5659 - val_acc: 0.8150\nEpoch 42/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.4237 - acc: 0.8574 - val_loss: 0.5865 - val_acc: 0.8149\nEpoch 43/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.4100 - acc: 0.8618 - val_loss: 0.5587 - val_acc: 0.8203\nEpoch 44/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.4129 - acc: 0.8591 - val_loss: 0.5484 - val_acc: 0.8236\nEpoch 45/50\n390/390 [==============================] - 21s 53ms/step - loss: 0.4008 - acc: 0.8650 - val_loss: 0.5541 - val_acc: 0.8201\nEpoch 46/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.3984 - acc: 0.8658 - val_loss: 0.5777 - val_acc: 0.8184\nEpoch 47/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.3953 - acc: 0.8653 - val_loss: 0.5694 - val_acc: 0.8182\nEpoch 48/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.3907 - acc: 0.8678 - val_loss: 0.5620 - val_acc: 0.8217\nEpoch 49/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.3820 - acc: 0.8729 - val_loss: 0.5992 - val_acc: 0.8101\nEpoch 50/50\n390/390 [==============================] - 20s 52ms/step - loss: 0.3864 - acc: 0.8697 - val_loss: 0.5734 - val_acc: 0.8214\nModel took 1016.55 seconds to train\n"
],
[
"from keras.callbacks import LearningRateScheduler\ndef scheduler(epoch, lr):\n lrate = 0.001\n # if epoch > 15:\n # lrate = 0.0003\n if epoch > 35:\n lrate = 0.0005\n if epoch > 60:\n lrate = 0.0003\n if epoch > 100:\n lrate = 0.0001\n\n return lrate\n # return round(0.003 * 1/(1 + 0.319 * epoch), 10)",
"_____no_output_____"
],
[
"# Define the model\nmy_model = Sequential()\n\nmy_model.add(SeparableConvolution2D(96, 3, 3, border_mode='same', input_shape=(32, 32, 3), activation='relu')) # 32*32*96\nmy_model.add(BatchNormalization())\nmy_model.add(Dropout(0.1))\n\nmy_model.add(SeparableConvolution2D(96, 3, 3, border_mode='valid', activation='relu')) # 30*30*96\nmy_model.add(BatchNormalization())\nmy_model.add(Dropout(0.1))\n\nmy_model.add(MaxPooling2D(pool_size=(2, 2))) # 15*15*96\nmy_model.add(Dropout(0.1))\n\nmy_model.add(SeparableConvolution2D(192, 3, 3, border_mode='same', activation='relu')) # 15*15*192\nmy_model.add(BatchNormalization())\nmy_model.add(Dropout(0.1))\n\nmy_model.add(SeparableConvolution2D(192, 3, 3, border_mode='valid', activation='relu')) # 13*13*192\nmy_model.add(BatchNormalization())\nmy_model.add(Dropout(0.1))\n\nmy_model.add(MaxPooling2D(pool_size=(2, 2))) # 6*6*192\nmy_model.add(Dropout(0.1))\n\nmy_model.add(SeparableConvolution2D(96, 3, 3, border_mode='same', activation='relu')) # 6*6*96\nmy_model.add(BatchNormalization())\nmy_model.add(Dropout(0.1))\n\nmy_model.add(SeparableConvolution2D(48, 3, 3, border_mode='valid', activation='relu')) # 4*4*48\nmy_model.add(BatchNormalization())\nmy_model.add(Dropout(0.1))\n\nmy_model.add(AveragePooling2D())\nmy_model.add(Flatten())\nmy_model.add(Dense(num_classes, activation='softmax'))\n\n# Compile the model\nmy_model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])\n",
"WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:190: The name tf.get_default_session is deprecated. Please use tf.compat.v1.get_default_session instead.\n\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:197: The name tf.ConfigProto is deprecated. Please use tf.compat.v1.ConfigProto instead.\n\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:203: The name tf.Session is deprecated. Please use tf.compat.v1.Session instead.\n\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:207: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.\n\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:216: The name tf.is_variable_initialized is deprecated. Please use tf.compat.v1.is_variable_initialized instead.\n\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:223: The name tf.variables_initializer is deprecated. Please use tf.compat.v1.variables_initializer instead.\n\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:2041: The name tf.nn.fused_batch_norm is deprecated. Please use tf.compat.v1.nn.fused_batch_norm instead.\n\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:4271: The name tf.nn.avg_pool is deprecated. Please use tf.nn.avg_pool2d instead.\n\n"
],
[
"my_model.summary()",
"Model: \"sequential_2\"\n_________________________________________________________________\nLayer (type) Output Shape Param # \n=================================================================\nseparable_conv2d_1 (Separabl (None, 32, 32, 96) 411 \n_________________________________________________________________\nbatch_normalization_1 (Batch (None, 32, 32, 96) 384 \n_________________________________________________________________\ndropout_6 (Dropout) (None, 32, 32, 96) 0 \n_________________________________________________________________\nseparable_conv2d_2 (Separabl (None, 30, 30, 96) 10176 \n_________________________________________________________________\nbatch_normalization_2 (Batch (None, 30, 30, 96) 384 \n_________________________________________________________________\ndropout_7 (Dropout) (None, 30, 30, 96) 0 \n_________________________________________________________________\nmax_pooling2d_4 (MaxPooling2 (None, 15, 15, 96) 0 \n_________________________________________________________________\ndropout_8 (Dropout) (None, 15, 15, 96) 0 \n_________________________________________________________________\nseparable_conv2d_3 (Separabl (None, 15, 15, 192) 19488 \n_________________________________________________________________\nbatch_normalization_3 (Batch (None, 15, 15, 192) 768 \n_________________________________________________________________\ndropout_9 (Dropout) (None, 15, 15, 192) 0 \n_________________________________________________________________\nseparable_conv2d_4 (Separabl (None, 13, 13, 192) 38784 \n_________________________________________________________________\nbatch_normalization_4 (Batch (None, 13, 13, 192) 768 \n_________________________________________________________________\ndropout_10 (Dropout) (None, 13, 13, 192) 0 \n_________________________________________________________________\nmax_pooling2d_5 (MaxPooling2 (None, 6, 6, 192) 0 \n_________________________________________________________________\ndropout_11 (Dropout) (None, 6, 6, 192) 0 \n_________________________________________________________________\nseparable_conv2d_5 (Separabl (None, 6, 6, 96) 20256 \n_________________________________________________________________\nbatch_normalization_5 (Batch (None, 6, 6, 96) 384 \n_________________________________________________________________\ndropout_12 (Dropout) (None, 6, 6, 96) 0 \n_________________________________________________________________\nseparable_conv2d_6 (Separabl (None, 4, 4, 48) 5520 \n_________________________________________________________________\nbatch_normalization_6 (Batch (None, 4, 4, 48) 192 \n_________________________________________________________________\ndropout_13 (Dropout) (None, 4, 4, 48) 0 \n_________________________________________________________________\naverage_pooling2d_1 (Average (None, 2, 2, 48) 0 \n_________________________________________________________________\nflatten_2 (Flatten) (None, 192) 0 \n_________________________________________________________________\ndense_4 (Dense) (None, 10) 1930 \n=================================================================\nTotal params: 99,445\nTrainable params: 98,005\nNon-trainable params: 1,440\n_________________________________________________________________\n"
],
[
"# train the model\nmy_start = time.time()\n# Train the model\nmy_model_info = my_model.fit_generator(datagen.flow(train_features, train_labels, batch_size = 128),\n samples_per_epoch = train_features.shape[0], nb_epoch = 50, \n callbacks=[LearningRateScheduler(scheduler,verbose=1)], \n validation_data = (test_features, test_labels), verbose=1)\nmy_end = time.time()\nprint (\"Model took %0.2f seconds to train\"%(my_end - my_start))\n# plot model history\nplot_model_history(my_model_info)\n# compute test accuracy\nprint (\"Accuracy on test data is: %0.2f\"%accuracy(test_features, test_labels, my_model))",
"WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_core/python/ops/math_grad.py:1424: where (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\nInstructions for updating:\nUse tf.where in 2.0, which has the same broadcast rule as np.where\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:1033: The name tf.assign_add is deprecated. Please use tf.compat.v1.assign_add instead.\n\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:1020: The name tf.assign is deprecated. Please use tf.compat.v1.assign instead.\n\nEpoch 1/50\n\nEpoch 00001: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 58s 149ms/step - loss: 1.5557 - acc: 0.4373 - val_loss: 1.4361 - val_acc: 0.5415\nEpoch 2/50\n\nEpoch 00002: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 52s 133ms/step - loss: 1.1925 - acc: 0.5738 - val_loss: 1.1993 - val_acc: 0.5945\nEpoch 3/50\n\nEpoch 00003: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 52s 133ms/step - loss: 1.0417 - acc: 0.6285 - val_loss: 1.0687 - val_acc: 0.6524\nEpoch 4/50\n\nEpoch 00004: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 52s 133ms/step - loss: 0.9511 - acc: 0.6644 - val_loss: 0.9113 - val_acc: 0.6927\nEpoch 5/50\n\nEpoch 00005: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 52s 133ms/step - loss: 0.8848 - acc: 0.6886 - val_loss: 0.8235 - val_acc: 0.7107\nEpoch 6/50\n\nEpoch 00006: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 52s 133ms/step - loss: 0.8382 - acc: 0.7048 - val_loss: 0.7792 - val_acc: 0.7362\nEpoch 7/50\n\nEpoch 00007: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 52s 133ms/step - loss: 0.7982 - acc: 0.7199 - val_loss: 0.7395 - val_acc: 0.7438\nEpoch 8/50\n\nEpoch 00008: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 52s 133ms/step - loss: 0.7557 - acc: 0.7351 - val_loss: 0.7790 - val_acc: 0.7412\nEpoch 9/50\n\nEpoch 00009: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 52s 133ms/step - loss: 0.7284 - acc: 0.7459 - val_loss: 0.7304 - val_acc: 0.7532\nEpoch 10/50\n\nEpoch 00010: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 52s 133ms/step - loss: 0.7134 - acc: 0.7513 - val_loss: 0.6800 - val_acc: 0.7726\nEpoch 11/50\n\nEpoch 00011: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 52s 133ms/step - loss: 0.6872 - acc: 0.7598 - val_loss: 0.6862 - val_acc: 0.7683\nEpoch 12/50\n\nEpoch 00012: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 52s 133ms/step - loss: 0.6756 - acc: 0.7645 - val_loss: 0.7042 - val_acc: 0.7661\nEpoch 13/50\n\nEpoch 00013: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 52s 133ms/step - loss: 0.6504 - acc: 0.7739 - val_loss: 0.5856 - val_acc: 0.8024\nEpoch 14/50\n\nEpoch 00014: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 52s 133ms/step - loss: 0.6306 - acc: 0.7791 - 
val_loss: 0.6116 - val_acc: 0.7919\nEpoch 15/50\n\nEpoch 00015: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.6247 - acc: 0.7830 - val_loss: 0.6492 - val_acc: 0.7874\nEpoch 16/50\n\nEpoch 00016: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.6119 - acc: 0.7852 - val_loss: 0.5855 - val_acc: 0.8039\nEpoch 17/50\n\nEpoch 00017: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.5965 - acc: 0.7922 - val_loss: 0.6048 - val_acc: 0.8000\nEpoch 18/50\n\nEpoch 00018: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.5893 - acc: 0.7952 - val_loss: 0.5782 - val_acc: 0.8048\nEpoch 19/50\n\nEpoch 00019: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.5768 - acc: 0.7986 - val_loss: 0.7024 - val_acc: 0.7748\nEpoch 20/50\n\nEpoch 00020: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.5700 - acc: 0.8018 - val_loss: 0.6498 - val_acc: 0.7898\nEpoch 21/50\n\nEpoch 00021: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.5562 - acc: 0.8066 - val_loss: 0.5830 - val_acc: 0.8055\nEpoch 22/50\n\nEpoch 00022: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.5500 - acc: 0.8074 - val_loss: 0.6385 - val_acc: 0.7967\nEpoch 23/50\n\nEpoch 00023: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.5422 - acc: 0.8119 - val_loss: 0.5620 - val_acc: 0.8101\nEpoch 24/50\n\nEpoch 00024: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.5399 - acc: 0.8122 - val_loss: 0.5218 - val_acc: 0.8270\nEpoch 25/50\n\nEpoch 00025: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.5345 - acc: 0.8123 - val_loss: 0.5354 - val_acc: 0.8197\nEpoch 26/50\n\nEpoch 00026: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.5185 - acc: 0.8199 - val_loss: 0.5285 - val_acc: 0.8251\nEpoch 27/50\n\nEpoch 00027: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.5160 - acc: 0.8196 - val_loss: 0.5104 - val_acc: 0.8308\nEpoch 28/50\n\nEpoch 00028: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.5076 - acc: 0.8235 - val_loss: 0.5193 - val_acc: 0.8229\nEpoch 29/50\n\nEpoch 00029: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.5084 - acc: 0.8217 - val_loss: 0.4858 - val_acc: 0.8354\nEpoch 30/50\n\nEpoch 00030: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4962 - acc: 0.8258 - val_loss: 0.5025 - val_acc: 0.8300\nEpoch 31/50\n\nEpoch 00031: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4956 - acc: 0.8273 - val_loss: 0.5164 - 
val_acc: 0.8296\nEpoch 32/50\n\nEpoch 00032: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4901 - acc: 0.8272 - val_loss: 0.5167 - val_acc: 0.8281\nEpoch 33/50\n\nEpoch 00033: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4877 - acc: 0.8302 - val_loss: 0.4924 - val_acc: 0.8374\nEpoch 34/50\n\nEpoch 00034: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4775 - acc: 0.8336 - val_loss: 0.4772 - val_acc: 0.8394\nEpoch 35/50\n\nEpoch 00035: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4805 - acc: 0.8332 - val_loss: 0.4876 - val_acc: 0.8343\nEpoch 36/50\n\nEpoch 00036: LearningRateScheduler setting learning rate to 0.001.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4775 - acc: 0.8338 - val_loss: 0.4590 - val_acc: 0.8479\nEpoch 37/50\n\nEpoch 00037: LearningRateScheduler setting learning rate to 0.0005.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4399 - acc: 0.8485 - val_loss: 0.4574 - val_acc: 0.8473\nEpoch 38/50\n\nEpoch 00038: LearningRateScheduler setting learning rate to 0.0005.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4317 - acc: 0.8486 - val_loss: 0.4420 - val_acc: 0.8557\nEpoch 39/50\n\nEpoch 00039: LearningRateScheduler setting learning rate to 0.0005.\n390/390 [==============================] - 51s 131ms/step - loss: 0.4296 - acc: 0.8508 - val_loss: 0.4718 - val_acc: 0.8473\nEpoch 40/50\n\nEpoch 00040: LearningRateScheduler setting learning rate to 0.0005.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4260 - acc: 0.8526 - val_loss: 0.4652 - val_acc: 0.8474\nEpoch 41/50\n\nEpoch 00041: LearningRateScheduler setting learning rate to 0.0005.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4213 - acc: 0.8520 - val_loss: 0.4234 - val_acc: 0.8597\nEpoch 42/50\n\nEpoch 00042: LearningRateScheduler setting learning rate to 0.0005.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4243 - acc: 0.8515 - val_loss: 0.4496 - val_acc: 0.8521\nEpoch 43/50\n\nEpoch 00043: LearningRateScheduler setting learning rate to 0.0005.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4212 - acc: 0.8524 - val_loss: 0.4485 - val_acc: 0.8518\nEpoch 44/50\n\nEpoch 00044: LearningRateScheduler setting learning rate to 0.0005.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4135 - acc: 0.8568 - val_loss: 0.4483 - val_acc: 0.8548\nEpoch 45/50\n\nEpoch 00045: LearningRateScheduler setting learning rate to 0.0005.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4084 - acc: 0.8556 - val_loss: 0.4329 - val_acc: 0.8560\nEpoch 46/50\n\nEpoch 00046: LearningRateScheduler setting learning rate to 0.0005.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4097 - acc: 0.8578 - val_loss: 0.4330 - val_acc: 0.8581\nEpoch 47/50\n\nEpoch 00047: LearningRateScheduler setting learning rate to 0.0005.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4078 - acc: 0.8569 - val_loss: 0.4666 - val_acc: 0.8483\nEpoch 48/50\n\nEpoch 00048: LearningRateScheduler setting learning rate to 0.0005.\n390/390 [==============================] - 51s 131ms/step - loss: 0.4090 - acc: 0.8565 - val_loss: 0.4320 - 
val_acc: 0.8585\nEpoch 49/50\n\nEpoch 00049: LearningRateScheduler setting learning rate to 0.0005.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4065 - acc: 0.8577 - val_loss: 0.4428 - val_acc: 0.8583\nEpoch 50/50\n\nEpoch 00050: LearningRateScheduler setting learning rate to 0.0005.\n390/390 [==============================] - 51s 130ms/step - loss: 0.4009 - acc: 0.8598 - val_loss: 0.4421 - val_acc: 0.8586\nModel took 2559.65 seconds to train\n"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a6610afe372a360388f08f53a8f0e5810591480
| 941,582 |
ipynb
|
Jupyter Notebook
|
Data Visualization/Matplotlib III - Exercise.ipynb
|
robynmundle/data_science
|
f63d8df0302d3a986f4e500aa9c2d5df1b35904f
|
[
"MIT"
] | null | null | null |
Data Visualization/Matplotlib III - Exercise.ipynb
|
robynmundle/data_science
|
f63d8df0302d3a986f4e500aa9c2d5df1b35904f
|
[
"MIT"
] | null | null | null |
Data Visualization/Matplotlib III - Exercise.ipynb
|
robynmundle/data_science
|
f63d8df0302d3a986f4e500aa9c2d5df1b35904f
|
[
"MIT"
] | null | null | null | 3,128.179402 | 281,120 | 0.963993 |
[
[
[
"# import mplot3d\nfrom mpl_toolkits import mplot3d\n\n# import numpy\nimport numpy as np\n\n# import matplotlib\nimport matplotlib.pyplot as plt",
"_____no_output_____"
]
],
[
[
"* Create wireframe of hyperbolic paraboloid on the domain <-10,10>X<-10,10>\n* hyperbolic paraboloid equation : z = x*x - y*y",
"_____no_output_____"
]
],
[
[
"# wireframe\n\n# create the figure and axes\nfig,ax = plt.subplots(figsize=(12.8,9.6))\n# create an empty canvas\nax = plt.axes(projection='3d')\n\n# divide the interval <-10,10> into 500 parts\nx = np.linspace(-10,10,500)\ny = np.linspace(-10,10,500)\n\n# a Cartesian product\nX,Y = np.meshgrid(x,y)\n\nZ = (X*X) - (Y*Y)\n\n\n# a wireframe\nax.plot_wireframe(X, Y, Z, color='r')\nplt.show()",
"_____no_output_____"
]
],
[
[
"* Create surface plot of eliptical paraboloid on the domain <-10,10>X<-10,10>\n* eliptical paraboloid : z = x*x + y*y",
"_____no_output_____"
]
],
[
[
"# surface plot\n# create the figure and axes\nfig,ax = plt.subplots(figsize=(12.8,9.6))\n# create an empty canvas\nax = plt.axes(projection='3d')\n\n# divide the interval <-10,10> into 500 parts\nx = np.linspace(-10,10,500)\ny = np.linspace(-10,10,500)\n\n# a Cartesian product\nX,Y = np.meshgrid(x,y)\n\nz = (x*x) + (y*y)\n\n# a surface plot\nax.plot_surface(X, Y, Z, cmap='jet')\nplt.show()",
"_____no_output_____"
]
],
[
[
"* Create surface plot of eliptical paraboloid on the domain <-10,10>X<-10,10>\n* Create also contour projection on the plane z=0 (HINT: use parameter 'zdir' of contour plot)\n* eliptical paraboloid : z = x*x + y*y",
"_____no_output_____"
]
],
[
[
"# contour plot\nfig, ax = plt.subplots(figsize=(12.8,9.6))\n\n# set a 3D projection\nax = plt.axes(projection='3d')\n\nax.contour(X, Y, Z, 40, zdir='z', cmap='jet')\nax.set_xlabel('X')\nax.set_ylabel('Y')\nax.set_zlabel('Z')\nax.set_zlim(-100, 200)\nplt.show()",
"_____no_output_____"
],
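[
"# Added sketch (hedged, not part of the original solution): the 'zdir' hint from the\n# exercise can be combined with an explicit 'offset' so the contour lines are actually\n# projected onto the plane z=0, together with the surface itself.\nfig = plt.figure(figsize=(12.8,9.6))\nax = plt.axes(projection='3d')\n\n# elliptical paraboloid on the same grid as above\nZ_ep = (X*X) + (Y*Y)\n\nax.plot_surface(X, Y, Z_ep, cmap='jet', alpha=0.6)\nax.contour(X, Y, Z_ep, 40, zdir='z', offset=0, cmap='jet')\nax.set_zlim(0, Z_ep.max())\nplt.show()",
"_____no_output_____"
],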
[
"# create the figure and axes\nfig,ax = plt.subplots(figsize=(12.8,9.6))\n\n# set a 3D projection\nax = plt.axes(projection='3d')\n\n# the surface\nax.contour(X, Y, Z, 40, cmap='jet')\nplt.show()",
"_____no_output_____"
]
]
] |
[
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
]
] |
4a662005b40e142c7746bb89a9886c3fba699548
| 34,575 |
ipynb
|
Jupyter Notebook
|
lessons/Chapter3/01_nonlinear_poisson_2d.ipynb
|
pyccel/IGA-Python
|
e3604ba3d76a20e3d30ed3c7c952dcd2dc8147bb
|
[
"MIT"
] | 2 |
2022-01-21T08:51:30.000Z
|
2022-03-17T12:14:02.000Z
|
lessons/Chapter3/01_nonlinear_poisson_2d.ipynb
|
pyccel/IGA-Python
|
e3604ba3d76a20e3d30ed3c7c952dcd2dc8147bb
|
[
"MIT"
] | null | null | null |
lessons/Chapter3/01_nonlinear_poisson_2d.ipynb
|
pyccel/IGA-Python
|
e3604ba3d76a20e3d30ed3c7c952dcd2dc8147bb
|
[
"MIT"
] | 1 |
2022-03-01T06:41:54.000Z
|
2022-03-01T06:41:54.000Z
| 62.297297 | 10,964 | 0.799103 |
[
[
[
"from sympy import pi, cos, sin, symbols\nfrom sympy.utilities.lambdify import implemented_function\nimport pytest\n\nfrom sympde.calculus import grad, dot\nfrom sympde.calculus import laplace\nfrom sympde.topology import ScalarFunctionSpace\nfrom sympde.topology import element_of\nfrom sympde.topology import NormalVector\nfrom sympde.topology import Square\nfrom sympde.topology import Union\nfrom sympde.expr import BilinearForm, LinearForm, integral\nfrom sympde.expr import Norm\nfrom sympde.expr import find, EssentialBC\nfrom sympde.expr.expr import linearize\n\nfrom psydac.fem.basic import FemField\nfrom psydac.api.discretization import discretize\n\nx,y,z = symbols('x1, x2, x3')",
"_____no_output_____"
]
],
[
[
"# Non-Linear Poisson in 2D",
"_____no_output_____"
],
[
"In this section, we consider the non-linear Poisson problem:\n\n$$\n-\\nabla \\cdot \\left( (1+u^2) \\nabla u \\right) = f, \\Omega\n\\\\\nu = 0, \\partial \\Omega\n$$\nwhere $\\Omega$ denotes the unit square.\n\nFor testing, we shall take a function $u$ that fulfills the boundary condition, the compute $f$ as\n\n$$\nf(x,y) = -\\nabla^2 u - F(u)\n$$",
"_____no_output_____"
],
[
"The weak formulation is\n\n$$\n\\int_{\\Omega} (1+u^2) \\nabla u \\cdot \\nabla v ~ d\\Omega = \\int_{\\Omega} f v ~d\\Omega, \\quad \\forall v \\in \\mathcal{V}\n$$",
"_____no_output_____"
],
[
"For the sack of generality, we shall consider the linear form\n\n$$\nG(v;u,w) := \\int_{\\Omega} (1+w^2) \\nabla u \\cdot \\nabla v ~ d\\Omega, \\quad \\forall u,v,w \\in \\mathcal{V}\n$$\n",
"_____no_output_____"
],
[
"Our problem is then\n\n$$\n\\mbox{Find } u \\in \\mathcal{V}, \\mbox{such that}\\\\\nG(v;u,u) = l(v), \\quad \\forall v \\in \\mathcal{V}\n$$\n\nwhere\n\n$$\nl(v) := \\int_{\\Omega} f v ~d\\Omega, \\quad \\forall v \\in \\mathcal{V}\n$$",
"_____no_output_____"
],
[
"#### Topological domain",
"_____no_output_____"
]
],
[
[
"domain = Square()\nB_dirichlet_0 = domain.boundary",
"_____no_output_____"
]
],
[
[
"#### Function Space",
"_____no_output_____"
]
],
[
[
"V = ScalarFunctionSpace('V', domain)",
"_____no_output_____"
]
],
[
[
"#### Defining the Linear form $G$",
"_____no_output_____"
]
],
[
[
"u = element_of(V, name='u')\nv = element_of(V, name='v')\nw = element_of(V, name='w')\n\n# Linear form g: V --> R\ng = LinearForm(v, integral(domain, (1+w**2)*dot(grad(u), grad(v))))",
"_____no_output_____"
]
],
[
[
"#### Defining the Linear form L",
"_____no_output_____"
]
],
[
[
"solution = sin(pi*x)*sin(pi*y)\nf = 2*pi**2*(sin(pi*x)**2*sin(pi*y)**2 + 1)*sin(pi*x)*sin(pi*y) - 2*pi**2*sin(pi*x)**3*sin(pi*y)*cos(pi*y)**2 - 2*pi**2*sin(pi*x)*sin(pi*y)**3*cos(pi*x)**2",
"_____no_output_____"
],
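[
"# Added sanity check (a sketch, not part of the original notebook): the hard-coded f\n# above can be recovered symbolically by inserting u = sin(pi*x)*sin(pi*y) into\n# f = -div((1+u**2)*grad(u)) using plain sympy.\nimport sympy as sp\n\nxs, ys = sp.symbols('x1 x2')\nu_exact = sp.sin(sp.pi*xs)*sp.sin(sp.pi*ys)\nflux_x = (1 + u_exact**2)*sp.diff(u_exact, xs)\nflux_y = (1 + u_exact**2)*sp.diff(u_exact, ys)\nf_exact = sp.expand(-(sp.diff(flux_x, xs) + sp.diff(flux_y, ys)))\nprint(f_exact)",
"_____no_output_____"
],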
[
"# Linear form l: V --> R\nl = LinearForm(v, integral(domain, f * v))",
"_____no_output_____"
]
],
[
[
"### Picard Method\n\n$$\n\\mbox{Find } u_{n+1} \\in \\mathcal{V}_h, \\mbox{such that}\\\\\nG(v;u_{n+1},u_n) = l(v), \\quad \\forall v \\in \\mathcal{V}_h\n$$",
"_____no_output_____"
],
[
"### Newton Method\n\nLet's define \n$$\nF(v;u) := G(v;u,u) -l(v), \\quad \\forall v \\in \\mathcal{V}\n$$\n\nNewton method writes\n$$\n\\mbox{Find } u_{n+1} \\in \\mathcal{V}_h, \\mbox{such that}\\\\\nF^{\\prime}(\\delta u,v; u_n) = - F(v;u_n), \\quad \\forall v \\in \\mathcal{V} \\\\\nu_{n+1} := u_{n} + \\delta u, \\quad \\delta u \\in \\mathcal{V}\n$$",
"_____no_output_____"
],
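[
"A quick added check (hedged, not from the original notebook): for the particular $G$ used here, the Gâteaux derivative of $F$ can be written out by hand, which is a useful sanity check on what **linearize** should return:\n\n$$\nF^{\\prime}(\\delta u, v; u) = \\int_{\\Omega} (1+u^2) \\nabla \\delta u \\cdot \\nabla v ~ d\\Omega + \\int_{\\Omega} 2 u \\, \\delta u \\, \\nabla u \\cdot \\nabla v ~ d\\Omega\n$$",
"_____no_output_____"
],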
[
"#### Computing $F^{\\prime}$ the derivative of $F$\n\n**SymPDE** allows you to linearize a linear form and get a bilinear form, using the function **linearize**",
"_____no_output_____"
]
],
[
[
"F = LinearForm(v, g(v,w=u)-l(v))\ndu = element_of(V, name='du')\n\nFprime = linearize(F, u, trials=du)",
"_____no_output_____"
]
],
[
[
"## Picard Method",
"_____no_output_____"
],
[
"#### Abstract Model",
"_____no_output_____"
]
],
[
[
"un = element_of(V, name='un')\n\n# Bilinear form a: V x V --> R\na = BilinearForm((u, v), g(v, u=u,w=un))\n\n# Dirichlet boundary conditions\nbc = [EssentialBC(u, 0, B_dirichlet_0)]\n\n# Variational problem\nequation = find(u, forall=v, lhs=a(u, v), rhs=l(v), bc=bc)\n\n# Error norms\nerror = u - solution\nl2norm = Norm(error, domain, kind='l2')",
"_____no_output_____"
]
],
[
[
"#### Discretization",
"_____no_output_____"
]
],
[
[
"# Create computational domain from topological domain\ndomain_h = discretize(domain, ncells=[16,16], comm=None)\n\n# Discrete spaces\nVh = discretize(V, domain_h, degree=[2,2])\n\n# Discretize equation using Dirichlet bc\nequation_h = discretize(equation, domain_h, [Vh, Vh])\n\n# Discretize error norms\nl2norm_h = discretize(l2norm, domain_h, Vh)",
"_____no_output_____"
]
],
[
[
"#### Picard solver",
"_____no_output_____"
]
],
[
[
"def picard(niter=10):\n Un = FemField( Vh, Vh.vector_space.zeros() )\n\n for i in range(niter):\n Un = equation_h.solve(un=Un)\n\n # Compute error norms\n l2_error = l2norm_h.assemble(u=Un)\n\n print('l2_error = ', l2_error)\n \n return Un",
"_____no_output_____"
],
[
"Un = picard(niter=5)",
"l2_error = 0.1041623200353605\nl2_error = 0.019794500321162495\nl2_error = 0.0032729508639899856\nl2_error = 0.00043360362547357383\nl2_error = 5.42731202704659e-05\n"
],
[
"from matplotlib import pyplot as plt\nfrom utilities.plot import plot_field_2d\n\nnbasis = [w.nbasis for w in Vh.spaces]\np1,p2 = Vh.degree\nx = Un.coeffs._data[p1:-p1,p2:-p2]\nu = x.reshape(nbasis)\nplot_field_2d(Vh.knots, Vh.degree, u) ; plt.colorbar()",
"_____no_output_____"
]
],
[
[
"## Newton Method",
"_____no_output_____"
],
[
"#### Abstract Model",
"_____no_output_____"
]
],
[
[
"# Dirichlet boundary conditions\nbc = [EssentialBC(du, 0, B_dirichlet_0)]\n\n# Variational problem\nequation = find(du, forall=v, lhs=Fprime(du, v,u=un), rhs=-F(v,u=un), bc=bc)",
"_____no_output_____"
]
],
[
[
"#### Discretization",
"_____no_output_____"
]
],
[
[
"# Create computational domain from topological domain\ndomain_h = discretize(domain, ncells=[16,16], comm=None)\n\n# Discrete spaces\nVh = discretize(V, domain_h, degree=[2,2])\n\n# Discretize equation using Dirichlet bc\nequation_h = discretize(equation, domain_h, [Vh, Vh])\n\n# Discretize error norms\nl2norm_h = discretize(l2norm, domain_h, Vh)",
"_____no_output_____"
]
],
[
[
"#### Newton Solver",
"_____no_output_____"
]
],
[
[
"def newton(niter=10):\n Un = FemField( Vh, Vh.vector_space.zeros() )\n\n for i in range(niter):\n delta_x = equation_h.solve(un=Un)\n Un = FemField( Vh, delta_x.coeffs + Un.coeffs )\n\n # Compute error norms\n l2_error = l2norm_h.assemble(u=Un)\n\n print('l2_error = ', l2_error)\n \n return Un",
"_____no_output_____"
],
[
"un = newton(niter=5)",
"l2_error = 0.1041623200353605\nl2_error = 0.011366075929785831\nl2_error = 0.00019827168123576672\nl2_error = 2.6121623867937704e-05\nl2_error = 2.612317723593809e-05\n"
],
[
"nbasis = [w.nbasis for w in Vh.spaces]\np1,p2 = Vh.degree\nx = un.coeffs._data[p1:-p1,p2:-p2]\nu = x.reshape(nbasis)\nplot_field_2d(Vh.knots, Vh.degree, u) ; plt.colorbar()",
"_____no_output_____"
]
]
] |
[
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
]
] |
4a662a25eb29fb38698a00f0508decd85769e845
| 37,353 |
ipynb
|
Jupyter Notebook
|
test/ipynb/groovy/ChartingTest.ipynb
|
clouds56/beakerx
|
c434e0c0619c6eea4f3dcf28530260da5041016e
|
[
"Apache-2.0"
] | 1 |
2018-10-16T18:59:59.000Z
|
2018-10-16T18:59:59.000Z
|
test/ipynb/groovy/ChartingTest.ipynb
|
noisyoscillator/beakerx
|
0a31fa209ea46b3ad184b583307460b38acfee86
|
[
"Apache-2.0"
] | null | null | null |
test/ipynb/groovy/ChartingTest.ipynb
|
noisyoscillator/beakerx
|
0a31fa209ea46b3ad184b583307460b38acfee86
|
[
"Apache-2.0"
] | null | null | null | 93.3825 | 21,072 | 0.676813 |
[
[
[
"empty"
]
]
] |
[
"empty"
] |
[
[
"empty"
]
] |
4a663f6e69245867038f337449275e51def05c5a
| 42,249 |
ipynb
|
Jupyter Notebook
|
advanced/regular_expressions/regex.ipynb
|
rocabrera/python-learning
|
578b6f6f64a59039956e2ff8eca9eb486127722f
|
[
"MIT"
] | 3 |
2021-04-16T01:30:05.000Z
|
2021-07-22T21:00:45.000Z
|
advanced/regular_expressions/regex.ipynb
|
rocabrera/python-learning
|
578b6f6f64a59039956e2ff8eca9eb486127722f
|
[
"MIT"
] | null | null | null |
advanced/regular_expressions/regex.ipynb
|
rocabrera/python-learning
|
578b6f6f64a59039956e2ff8eca9eb486127722f
|
[
"MIT"
] | null | null | null | 28.917864 | 435 | 0.49942 |
[
[
[
"import re",
"_____no_output_____"
]
],
[
[
"The re module uses a backtracking regular expression engine",
"_____no_output_____"
],
[
"Regular expressions match text patterns \n\nUse case examples:\n\n- Check if an email or phone number was written correctly.\n- Split text by some mark (comma, dot, newline) which may be useful to parse data.\n- Get content from HTML tags.\n- Improve your linux command skills.",
"_____no_output_____"
],
[
"However ...\n\n>Some people, when confronted with a problem, think \"I know, I'll use regular expressions\". Now they have two problems - Jamie Zawinski, 1997",
"_____no_output_____"
],
[
"## **Python String** ",
"_____no_output_____"
],
[
"\\begin{array}{ccc}\n\\hline Type & Prefixed & Description \\\\\\hline\n \\text{String} & - & \\text{They are string literals. They're Unicode. The backslash is\nnecessary to escape meaningful characters.} \\\\\n \\text{Raw String} & \\text{r or R} & \\text{They're equal to literal strings with the exception of the\nbackslashes, which are treated as normal characters.} \\\\\n \\text{Byte String} & \\text{b or B} & \\text{Strings represented as bytes. They can only contain ASCII\ncharacters; if the byte is greater than 128, it must be escaped.} \\\\\n\\end{array}",
"_____no_output_____"
]
],
[
[
"#Normal String\nprint(\"feijão com\\t limão\")\n#Raw String\nprint(r\"feijão com\\t limão\")\n#Byte String\nprint(b\"feij\\xc3\\xa3o com\\t lim\\xc3\\xa3o\")",
"feijão com\t limão\nfeijão com\\t limão\nb'feij\\xc3\\xa3o com\\t lim\\xc3\\xa3o'\n"
],
[
"str(b\"feij\\xc3\\xa3o com\\t lim\\xc3\\xa3o\", \"utf-8\")",
"_____no_output_____"
]
],
[
[
"## **General** ",
"_____no_output_____"
],
[
"Our build blocks are composed of:\n\n- Literals\n- Metacharacter\n - Backslash: \\\\\n - Caret: \\^\n - Dollar Sign: \\$\n - Dot: \\.\n - Pipe Symbol: \\|\n - Question Mark: \\?\n - Asterisk: \\*\n - Plus sign: \\+\n - Opening parenthesis: \\(\n - Closing parenthesis: \\)\n - Opening square bracket: \\[\n - The opening curly brace: \\{\n",
"_____no_output_____"
],
[
"### **Literals** ",
"_____no_output_____"
]
],
[
[
"\"\"\"\nversion 1: with compile\n\"\"\"\ndef areYouHungry_v1(pattern, text):\n match = pattern.search(text)\n if match: print(\"HERE !!!\\n\")\n else: print(\"Sorry pal, you'll starve to death.\\n\")\n\nhelloWorldRegex = r\"rodrigo\"\n\npattern = re.compile(helloWorldRegex)\n\ntext1 = r\"Where can I find food here? - rodrigo\"\ntext2 = r\"Where can I find food here? - Rodrigo\"\n\nareYouHungry_v1(pattern,text1)\nareYouHungry_v1(pattern,text2)",
"HERE !!!\n\nSorry pal, you'll starve to death.\n\n"
],
[
"\"\"\"\nversion 2: without compile\n\"\"\"\ndef areYouHungry_v2(regex, text):\n match = re.search(regex, text)\n if match: print(\"HERE !!!\\n\")\n else: print(\"Sorry pal, you'll starve to death.\\n\")\n\nhelloWorldRegex = r\"rodrigo\"\n\ntext1 = r\"Where can I find food here? - rodrigo\"\ntext2 = r\"Where can I find food here? - Rodrigo\"\n\nareYouHungry_v2(helloWorldRegex, text1)\nareYouHungry_v2(helloWorldRegex, text2)",
"HERE !!!\n\nSorry pal, you'll starve to death.\n\n"
]
],
[
[
"### **Character classes**",
"_____no_output_____"
]
],
[
[
"\"\"\"\nversion 3: classes\n\"\"\"\ndef areYouHungry_v3(pattern, text):\n match = pattern.search(text)\n if match: print(\"Beer is also food !!\\n\")\n else: print(\"Sorry pal, you'll starve to death.\\n\")\n\nhelloWorldRegex = r\"[rR]odrigo\"\n\npattern = re.compile(helloWorldRegex)\n\ntext1 = r\"Where can I find food here? - rodrigo\"\ntext2 = r\"Where can I find food here? - Rodrigo\"\n\nareYouHungry_v3(pattern,text1)\nareYouHungry_v3(pattern,text2)",
"Beer is also food !!\n\nBeer is also food !!\n\n"
]
],
[
[
"Usual Classes:\n\n- [0-9]: Matches anything between 0 and 9.\n- [a-z]: Matches anything between a and z.\n- [A-Z]: Matches anything between A and Z.\n\nPredefined Classes:\n- **\\.** : Matches everything except newline.\n- Lower Case classes:\n - \\d : Same as [0-9].\n - \\s : Same as [ \\t\\n\\r\\f\\v] the first character of the class is the whitespace character.\n - \\w : Same as [a-zA-Z0-9_] the last character of the class is the underscore character.\n\n- Upper Case classes (the negation):\n - \\D : Matches non decimal digit, same as [^0-9].\n - \\S : Matches any non whitespace character [^ \\t\\n\\r\\f\\v].\n - \\W : Matches any non alphanumeric character [^a-zA-Z0-9_] .\n",
"_____no_output_____"
],
[
"Both codes will do the same ... \n\nThe re module keeps a cache with come compiled regex so you do not need to compile the regex everytime you call the function (technique called memoization). \n\nThe first version just give you a fine control ...",
"_____no_output_____"
],
[
"```Pattern``` was **re.Pattern** variable which has a lot of methods. Let's find out with methods are there using regular expression !!",
"_____no_output_____"
]
],
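[
[
"# Added demo (a small sketch, not in the original notebook): the predefined character\n# classes listed above in action.\nsample = \"Order #42 shipped on 2021-04-16!\"\nprint(re.findall(r\"\\d+\", sample))   # decimal digits -> ['42', '2021', '04', '16']\nprint(re.findall(r\"\\w+\", sample))   # word characters [a-zA-Z0-9_]\nprint(re.findall(r\"\\s\", sample))    # whitespace characters\nprint(re.findall(r\"\\D+\", sample))   # runs of non-digit characters",
"_____no_output_____"
]
],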
[
[
"helloWorldRegex = r\"[rR]odrigo\"\npattern = re.compile(helloWorldRegex)\npatternText = \"\\n\".join(dir(pattern))\npatternText",
"_____no_output_____"
],
[
"#Regex for does not start with “__”\npattern_list_methods = set(re.findall(r\"^(?!__).*$\", patternText, re.M))\nto_delete = [\"fullmatch\", \"groupindex\", \"pattern\", \"scanner\"]\npattern_list_methods.difference_update(to_delete)\nprint(pattern_list_methods)",
"{'groups', 'subn', 'match', 'search', 'sub', 'split', 'flags', 'finditer', 'findall'}\n"
]
],
[
[
"- RegexObject: It is also known as Pattern Object. It represents a compiled regular expression\n- MatchObject: It represents the matched pattern",
"_____no_output_____"
],
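[
"A tiny added illustration (hedged, not from the original) of the two object types:\n\n```python\npattern = re.compile(r\"\\w+\")      # re.Pattern (the compiled regular expression)\nmatch = pattern.search(\"hello\")   # re.Match (the matched pattern)\nprint(type(pattern), type(match))\n```",
"_____no_output_____"
],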
[
"### Regex Behavior",
"_____no_output_____"
]
],
[
[
"def isGotcha(match):\n if match: print(\"Found it\")\n else: print(\"None here\")\n \ndata = \"aaabbbccc\"\nmatch1 = re.match(\"\\w+\", data)\nisGotcha(match1)\nmatch2 = re.match(\"bbb\",data)\nisGotcha(match2)\nmatch3 = re.search(\"bbb\",data)\nisGotcha(match3)",
"Found it\nNone here\nFound it\n"
]
],
[
[
"\\begin{array}{rrr}\n\\hline \\text{match1} & \\text{match2} & \\text{match3}\\\\\\hline\n \\text{aaabbbccc} & \\text{aaabbbccc} & \\text{aaabbbccc}\\\\\n \\text{aabbbccc} & \\text{returns None} & \\text{aabbbccc}\\\\\n \\text{abbbccc} & - & \\text{abbbccc}\\\\\n \\text{bbbccc} & - & \\text{bbbccc}\\\\\n \\text{bbccc} & - & \\text{bbccc}\\\\\n \\text{bccc} & - & \\text{bccc}\\\\\n \\text{ccc} & - & \\text{returns Match}\\\\\n \\text{cc} & - & - \\\\\n \\text{c} & - & - \\\\\n \\text{returns None} & - & - \n\\end{array}",
"_____no_output_____"
],
[
"### Greedy Behavior",
"_____no_output_____"
]
],
[
[
"text = \"<b>foo</b> and <i>so on</i>\"\n\nmatch = re.match(\"<.*>\",text)\nprint(match)\nprint(match.group())",
"<re.Match object; span=(0, 27), match='<b>foo</b> and <i>so on</i>'>\n<b>foo</b> and <i>so on</i>\n"
],
[
"text = \"<b>foo</b> and <i>so on</i>\"\n\nmatch = re.match(\"<.*?>\",text)\nprint(match)\nprint(match.group())",
"<re.Match object; span=(0, 3), match='<b>'>\n<b>\n"
]
],
[
[
"The non-greedy behavior can be requested by adding an extra question mark to the quantifier; for example, ??, *? or +?. A quantifier marked as reluctant will behave like the exact opposite of the greedy ones. They will try to have the smallest match possible.",
"_____no_output_____"
],
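[
"A small added sketch of the other reluctant quantifiers (`+?` and `??`): each one grabs as little text as it can while still allowing an overall match.\n\n```python\nre.search(r\"<.+?>\", \"<b>foo</b>\").group()   # '<b>' instead of the whole string\nre.search(r\"ab??\", \"abb\").group()           # 'a', because b?? prefers zero repetitions\n```",
"_____no_output_____"
],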
[
"## **Problem 1** - Phone Number",
"_____no_output_____"
],
[
"### **Search** ",
"_____no_output_____"
]
],
[
[
"def isThere_v1(regexObject, text):\n if regexObject: return f\"Your number is: {regexObject.group()}!\"\n else: return \"Hey! I did not find it.\"\n\n \ntext = \"\"\" 9-96379889\n 96379889\n 996379889\n 9-9637-9889\n\n 42246889\n 4224-6889\n \n 99637 9889\n 9 96379889\n \"\"\"\n\n#The first character is not a number, but a whitespace.\nregex1 = re.search(r\"\\d?\", text)\n\n#Removing the whitespace character we find the number ! The ? operator means optional\nregex2 = re.search(r\"\\d?\", text.strip())\n\n#Then, it could appear a optional whitespace or -. We also get two decimal character with \\d\\d \nregex3 = re.search(r\"\\d?-?\\d\\d\", text.strip())\n\n#However we want more than one decimal chracter. This can be achievied by using the + operator\nregex4 = re.search(r\"\\d?-?\\d+\", text.strip())\n\n#Looking backwards $\nregex5 = re.search(r\"\\d?-?\\d+$\", text.strip())\n\n#Using class to get - or whitespace\nregex6 = re.search(r\"\\d?[-\\s]?\\d+$\", text.strip())\n\nregex_lst = [regex1, regex2, regex3, regex4, regex5, regex6]\nfor index, regex in enumerate(regex_lst):\n print(f\"Regex Number {index+1}\")\n print(isThere_v1(regex,text) + \"\\n\")",
"Regex Number 1\nYour number is: !\n\nRegex Number 2\nYour number is: 9!\n\nRegex Number 3\nYour number is: 9-96!\n\nRegex Number 4\nYour number is: 9-96379889!\n\nRegex Number 5\nYour number is: 96379889!\n\nRegex Number 6\nYour number is: 9 96379889!\n\n"
]
],
[
[
"### **Findall** ",
"_____no_output_____"
]
],
[
[
"def isThere_v2(regexObject, text):\n if regexObject: return f\"Uow phone numbers:\\n{regexObject} !\"\n else: return \"Hey! I did not find it.\"\n \ntext = \"\"\" 996349889\n 96359889\n 9-96349889\n \n 9-9634-9889\n 42256889\n 4225-6889\n \n 99634 9889\n 9 96349889\n \"\"\"\n#findall looks for every possible match.\n\nregex7 = re.findall(r\"\\d?[-\\s]?\\d+\", text)\n\"\"\"\nWhy is [... ' 9', '-96349889' ...] splited? \n\nStep1: \\d? is not consumed.\nStep2: [-\\s]? the whitespace is consumed.\nStep3: \\d+ Consumes 9 and stop due to the - character.\n\nTherefore ' 9' is recognized.\n\"\"\"\n\nregex8 = re.findall(r\"\\d?[-\\s]?\\d+[-\\s]?\\d+\", text.strip())\n\"\"\"\nWhy is [... ' 9-9634', '-9889' ...] splited? \n\nStep1: \\d? is consumed.\nStep2: [-\\s]? is consumed.\nStep3: \\d+ Consumes until the - character\nStep4: [-\\s]? is not consumed\nStep5: \\d+ is ignored because the first decimal was consumed in Step3\n\nTherefore ' 9-9634' is recognized.\n\"\"\"\n\n#Adds a restrition of 4 decimals in the first part.\nregex9 = re.findall(r\"\\d?[-\\s]?\\d{4}[-\\s]?\\d+\", text.strip())\n\n#Adds a restrition of 4 decimals in the second part forcing a number after the whitespace.\nregex10 = re.findall(r\"\\d?[-\\s]?\\d{4}[-\\s]?\\d{4}\", text.strip())\n\nregex_lst = [regex7, regex8, regex9, regex10]\n\nfor index, regex in enumerate(regex_lst):\n print(f\"Regex Number {index+7}\")\n print(isThere_v2(regex,text) + \"\\n\")",
"Regex Number 7\nUow phone numbers:\n[' 996349889', ' 96359889', ' 9', '-96349889', ' 9', '-9634', '-9889', ' 42256889', ' 4225', '-6889', ' 99634', ' 9889', ' 9', ' 96349889'] !\n\nRegex Number 8\nUow phone numbers:\n['996349889', ' 96359889', ' 9-96349889', ' 9-9634', '-9889', ' 42256889', ' 4225-6889', ' 99634 9889', ' 9 96349889'] !\n\nRegex Number 9\nUow phone numbers:\n['996349889', ' 96359889', '9-96349889', '9-9634-9889', ' 42256889', ' 4225-6889', ' 99634', '9 96349889'] !\n\nRegex Number 10\nUow phone numbers:\n['996349889', ' 96359889', '9-96349889', '9-9634-9889', ' 42256889', ' 4225-6889', '99634 9889', '9 96349889'] !\n\n"
],
[
"text_dirty = r\"\"\"996379889\n 96379889\n 9-96379889\n 9-9637-9889\n 42246889\n 4224-6889\n 99637 9889\n 9 96379889\n 777 777 777\n 90 329921 0\n 9999999999 9\n 8588588899436\n \"\"\"\n\n#Regex 10\nregex_dirty1 = re.findall(r\"\\d?[-\\s]?\\d{4}[-\\s]?\\d{4}\", text_dirty.strip())\n\n#Adding Negative look behind and negative look ahead\nregex_dirty2 = re.findall(r\"(?<!\\d)\\d?[-\\s]?\\d{4}[-\\s]?\\d{4}(?!\\d)\", text_dirty.strip())\n\n#\\b is a word boundary which depend on the contextkey contexts.\nregex_dirty3 = re.findall(r\"\\b\\d?[-\\s]?\\d{4}[-\\s]?\\d{4}\\b\", text_dirty.strip())\n\n\nregex_dirty_lst = [regex_dirty1, regex_dirty2, regex_dirty3]\n\nfor index, result in enumerate(map(lambda x: isThere_v2(x,text_dirty), regex_dirty_lst)):\n print(f\"Regex Dirty Number {index+1}\")\n print(result + \"\\n\")",
"Regex Dirty Number 1\nUow phone numbers:\n['996379889', ' 96379889', '9-96379889', '9-9637-9889', ' 42246889', ' 4224-6889', '99637 9889', '9 96379889', ' 99999999', ' 85885888'] !\n\nRegex Dirty Number 2\nUow phone numbers:\n['996379889', ' 96379889', '9-96379889', '9-9637-9889', ' 42246889', ' 4224-6889', '99637 9889', '9 96379889'] !\n\nRegex Dirty Number 3\nUow phone numbers:\n['996379889', '96379889', '9-96379889', '9-9637-9889', '42246889', '4224-6889', '99637 9889', '9 96379889'] !\n\n"
]
],
[
[
"### **Finditer** ",
"_____no_output_____"
],
[
"This is a lazy method !!",
"_____no_output_____"
]
],
[
[
"real_text_example = \"\"\"\n Lorem ipsum dolor sit amet, consectetur adipiscing elit. Duis viverra consectetur sodales. Vestibulum consequat, \n risus in sollicitudin imperdiet, velit 996379889 elit congue sem, vitae aliquet ligula mi eget justo. Nulla facilisi. \n Maecenas a egestas nisi. Morbi purus dolor, ornare ac dui a, eleifend dignissim nunc. Proin pellentesque dolor non lectus pellentesque \n tincidunt. Ut et 345.323.343-9 tempus orci. Duis molestie 9 96379889 cursus tortor vitae pretium. 4224-6889 Donec non sapien neque. Pellentesque urna ligula, finibus a lectus sit amet\n , ultricies cursus metus. Quisque eget orci et turpis faucibus 4224-6889 pharetra. \n \"\"\"\nmatch_genarator = re.finditer(r'\\b((?:9[ -]?)?\\d{4}[ \\-]?\\d{4})\\b', real_text_example.strip())",
"_____no_output_____"
],
[
"for match in match_genarator:\n print(f\"Phone Number: {match.group()}\\nText Position: {match.span()}\\n\")",
"Phone Number: 996379889\nText Position: (173, 182)\n\nPhone Number: 9 96379889\nText Position: (487, 497)\n\nPhone Number: 4224-6889\nText Position: (527, 536)\n\nPhone Number: 4224-6889\nText Position: (697, 706)\n\n"
]
],
[
[
"## **Problem 2** - Format Text",
"_____no_output_____"
],
[
"### **Groups**",
"_____no_output_____"
]
],
[
[
"email_text = \"hey my email is: localPart@domain\"",
"_____no_output_____"
]
],
[
[
"Using the parenthesis it is possible to capture a group:",
"_____no_output_____"
]
],
[
[
"match = re.search(\"(\\w+)@(\\w+)\", email_text)\nprint(match.group(1))\nprint(match.group(2))",
"localPart\ndomain\n"
]
],
[
[
"Using the following syntax it is possible to give a name to the group:\n```?P<name>pattern```",
"_____no_output_____"
]
],
[
[
"match = re.search(\"(?P<localPart>\\w+)@(?P<domain>\\w+)\", email_text)\nprint(match.group(\"localPart\"))\nprint(match.group(\"domain\"))",
"localPart\ndomain\n"
]
],
[
[
"### **Sub** ",
"_____no_output_____"
],
[
"Suppose a text with the following structure:\n\n```\ntime - day | usage | id, description \\n\n```\n\nDefinitely, a unique separator should be used .. However life is tough.",
"_____no_output_____"
]
],
[
[
"my_txt = r\"\"\"\n 20:18:14 - 21/01 | 0.65 | 3947kedj, No |dia| em que eu saí de casa\n 25:32:26 - 11/07 | 0.80 | 5679lqui, Minha mãe me disse: |filho|, vem cá\n 12:13:00 - 12/06 | 0.65 | 5249dqok, Passou a mão em meu cabelos\n 23:12:35 - 13/03 | 0.77 | 3434afdf, Olhou em meus |olhos|, começou falar\n 20:22:00 - 12/02 | 0.98 | 1111absd, We are the champions, my friends\n 22:12:00 - 07/03 | 0.65 | 4092bvds, And we'll keep on |fighting| till the end\n 22:52:59 - 30/02 | 0.41 | 9021poij, We are the |champions|, we are the champions\n 21:47:00 - 28/03 | 0.15 | 6342fdpo, No time for |losers|, 'cause we are the champions\n 19:19:00 - 31/08 | 0.30 | 2314qwen, of the |world|\n 00:22:21 - 99/99 | 0.00 | 0000aaaa, \n \"\"\"\nprint(my_txt)",
"\n 20:18:14 - 21/01 | 0.65 | 3947kedj, No |dia| em que eu saí de casa\n 25:32:26 - 11/07 | 0.80 | 5679lqui, Minha mãe me disse: |filho|, vem cá\n 12:13:00 - 12/06 | 0.65 | 5249dqok, Passou a mão em meu cabelos\n 23:12:35 - 13/03 | 0.77 | 3434afdf, Olhou em meus |olhos|, começou falar\n 20:22:00 - 12/02 | 0.98 | 1111absd, We are the champions, my friends\n 22:12:00 - 07/03 | 0.65 | 4092bvds, And we'll keep on |fighting| till the end\n 22:52:59 - 30/02 | 0.41 | 9021poij, We are the |champions|, we are the champions\n 21:47:00 - 28/03 | 0.15 | 6342fdpo, No time for |losers|, 'cause we are the champions\n 19:19:00 - 31/08 | 0.30 | 2314qwen, of the |world|\n 00:22:21 - 99/99 | 0.00 | 0000aaaa, \n \n"
],
[
"#\\g<name> to reference a group.\n\npattern = re.compile(r\"(?P<time>\\d{2}:\\d{2}:\\d{2}) - (?P<day>\\d{2}/\\d{2})\")\ntext = pattern.sub(\"\\g<day> - \\g<time>\",my_txt)\nprint(text)",
"\n 21/01 - 20:18:14 | 0.65 | 3947kedj, No |dia| em que eu saí de casa\n 11/07 - 25:32:26 | 0.80 | 5679lqui, Minha mãe me disse: |filho|, vem cá\n 12/06 - 12:13:00 | 0.65 | 5249dqok, Passou a mão em meu cabelos\n 13/03 - 23:12:35 | 0.77 | 3434afdf, Olhou em meus |olhos|, começou falar\n 12/02 - 20:22:00 | 0.98 | 1111absd, We are the champions, my friends\n 07/03 - 22:12:00 | 0.65 | 4092bvds, And we'll keep on |fighting| till the end\n 30/02 - 22:52:59 | 0.41 | 9021poij, We are the |champions|, we are the champions\n 28/03 - 21:47:00 | 0.15 | 6342fdpo, No time for |losers|, 'cause we are the champions\n 31/08 - 19:19:00 | 0.30 | 2314qwen, of the |world|\n 99/99 - 00:22:21 | 0.00 | 0000aaaa, \n \n"
],
[
"#pattern new_text texto\n\"\"\"\nChanges an optional whitespace with the - caracter to a comma (,).\n\"\"\"\ntext = re.sub(r\"\\s?-\", ',', my_txt)\nprint(text)",
"\n 20:18:14, 21/01 | 0.65 | 3947kedj, No |dia| em que eu saí de casa\n 25:32:26, 11/07 | 0.80 | 5679lqui, Minha mãe me disse: |filho|, vem cá\n 12:13:00, 12/06 | 0.65 | 5249dqok, Passou a mão em meu cabelos\n 23:12:35, 13/03 | 0.77 | 3434afdf, Olhou em meus |olhos|, começou falar\n 20:22:00, 12/02 | 0.98 | 1111absd, We are the champions, my friends\n 22:12:00, 07/03 | 0.65 | 4092bvds, And we'll keep on |fighting| till the end\n 22:52:59, 30/02 | 0.41 | 9021poij, We are the |champions|, we are the champions\n 21:47:00, 28/03 | 0.15 | 6342fdpo, No time for |losers|, 'cause we are the champions\n 19:19:00, 31/08 | 0.30 | 2314qwen, of the |world|\n 00:22:21, 99/99 | 0.00 | 0000aaaa, \n \n"
],
[
"#pattern new_text texto\n\n\"\"\"\nDo not forget to escape meaninful caracters :) \n\nthe dot character is escaped however, the pipe character is not :(\n\"\"\"\n\npattern = r\"(?P<time>\\d{2}:\\d{2}:\\d{2}) - (?P<day>\\d{2}/\\d{2}) | (?P<usage>\\d\\.\\d{2}) | (?P<id>\\d{4}\\w{4})\"\nnew_text = r\"\\g<time>, \\g<day>, \\g<usage>, \\g<id>\"\ntext = re.sub(pattern, new_text, my_txt)\n\nprint(text)",
"\n 20:18:14, 21/01, , |, , 0.65, |, , , 3947kedj, No |dia| em que eu saí de casa\n 25:32:26, 11/07, , |, , 0.80, |, , , 5679lqui, Minha mãe me disse: |filho|, vem cá\n 12:13:00, 12/06, , |, , 0.65, |, , , 5249dqok, Passou a mão em meu cabelos\n 23:12:35, 13/03, , |, , 0.77, |, , , 3434afdf, Olhou em meus |olhos|, começou falar\n 20:22:00, 12/02, , |, , 0.98, |, , , 1111absd, We are the champions, my friends\n 22:12:00, 07/03, , |, , 0.65, |, , , 4092bvds, And we'll keep on |fighting| till the end\n 22:52:59, 30/02, , |, , 0.41, |, , , 9021poij, We are the |champions|, we are the champions\n 21:47:00, 28/03, , |, , 0.15, |, , , 6342fdpo, No time for |losers|, 'cause we are the champions\n 19:19:00, 31/08, , |, , 0.30, |, , , 2314qwen, of the |world|\n 00:22:21, 99/99, , |, , 0.00, |, , , 0000aaaa, \n \n"
],
[
"#pattern new_text texto\n\npattern = \"(?P<time>\\d{2}:\\d{2}:\\d{2}) - (?P<day>\\d{2}/\\d{2}) \\| (?P<usage>\\d\\.\\d{2}) \\| (?P<id>\\d{4}\\w{4})\"\nnew_text = \"\\g<time>, \\g<day>, \\g<usage>, \\g<id>\"\ntext = re.sub(pattern, new_text, my_txt)\n\nprint(text)",
"\n 20:18:14, 21/01, 0.65, 3947kedj, No |dia| em que eu saí de casa\n 25:32:26, 11/07, 0.80, 5679lqui, Minha mãe me disse: |filho|, vem cá\n 12:13:00, 12/06, 0.65, 5249dqok, Passou a mão em meu cabelos\n 23:12:35, 13/03, 0.77, 3434afdf, Olhou em meus |olhos|, começou falar\n 20:22:00, 12/02, 0.98, 1111absd, We are the champions, my friends\n 22:12:00, 07/03, 0.65, 4092bvds, And we'll keep on |fighting| till the end\n 22:52:59, 30/02, 0.41, 9021poij, We are the |champions|, we are the champions\n 21:47:00, 28/03, 0.15, 6342fdpo, No time for |losers|, 'cause we are the champions\n 19:19:00, 31/08, 0.30, 2314qwen, of the |world|\n 00:22:21, 99/99, 0.00, 0000aaaa, \n \n"
]
],
[
[
"### **Subn**",
"_____no_output_____"
],
[
"Similar to ```sub```. It returns a tuple with the new string and the number of substitutions made. ",
"_____no_output_____"
]
],
[
[
"#pattern new_text texto\n\npattern = \"\\|\"\nnew_text = \"\"\nclean_txt, mistakes_count = re.subn(pattern, new_text, text)\n\nprint(f\"Clean Text Result:\\n{clean_txt}\")\nprint(f\"How many mistakes did I make it?\\n{mistakes_count}\")",
"Clean Text Result:\n\n 20:18:14, 21/01, 0.65, 3947kedj, No dia em que eu saí de casa\n 25:32:26, 11/07, 0.80, 5679lqui, Minha mãe me disse: filho, vem cá\n 12:13:00, 12/06, 0.65, 5249dqok, Passou a mão em meu cabelos\n 23:12:35, 13/03, 0.77, 3434afdf, Olhou em meus olhos, começou falar\n 20:22:00, 12/02, 0.98, 1111absd, We are the champions, my friends\n 22:12:00, 07/03, 0.65, 4092bvds, And we'll keep on fighting till the end\n 22:52:59, 30/02, 0.41, 9021poij, We are the champions, we are the champions\n 21:47:00, 28/03, 0.15, 6342fdpo, No time for losers, 'cause we are the champions\n 19:19:00, 31/08, 0.30, 2314qwen, of the world\n 00:22:21, 99/99, 0.00, 0000aaaa, \n \nHow many mistakes did I make it?\n14\n"
]
],
[
[
"### **Groupdict**",
"_____no_output_____"
]
],
[
[
"#pattern new_text texto\n\n\"\"\"\nDo not forget to escape meaninful caracters :) \n\nthe dot character is escaped however, the pipe character is not :(\n\"\"\"\n\npattern = r\"(?P<time>\\d{2}:\\d{2}:\\d{2}) - (?P<day>\\d{2}/\\d{2}) \\| (?P<usage>\\d\\.\\d{2}) \\| (?P<id>\\d{4}\\w{4})\"\nmatchs = re.finditer(pattern, my_txt)\n\nfor match in matchs:\n print(match.groupdict())",
"{'time': '20:18:14', 'day': '21/01', 'usage': '0.65', 'id': '3947kedj'}\n{'time': '25:32:26', 'day': '11/07', 'usage': '0.80', 'id': '5679lqui'}\n{'time': '12:13:00', 'day': '12/06', 'usage': '0.65', 'id': '5249dqok'}\n{'time': '23:12:35', 'day': '13/03', 'usage': '0.77', 'id': '3434afdf'}\n{'time': '20:22:00', 'day': '12/02', 'usage': '0.98', 'id': '1111absd'}\n{'time': '22:12:00', 'day': '07/03', 'usage': '0.65', 'id': '4092bvds'}\n{'time': '22:52:59', 'day': '30/02', 'usage': '0.41', 'id': '9021poij'}\n{'time': '21:47:00', 'day': '28/03', 'usage': '0.15', 'id': '6342fdpo'}\n{'time': '19:19:00', 'day': '31/08', 'usage': '0.30', 'id': '2314qwen'}\n{'time': '00:22:21', 'day': '99/99', 'usage': '0.00', 'id': '0000aaaa'}\n"
]
],
[
[
"## **Performance**",
"_____no_output_____"
],
[
">Programmers waste enormous amounts of time thinking about, or worrying\nabout, the speed of noncritical parts of their programs, and these attempts at\nefficiency actually have a strong negative impact when debugging and maintenance\nare considered. We should forget about small efficiencies, say about 97% of the\ntime: premature optimization is the root of all evil. Yet we should not pass up our\nopportunities in that critical 3%. - Donald Knuth",
"_____no_output_____"
],
[
"General:\n\n- Don't be greedy.\n- Reuse compiled patterns.\n- Be specific.",
"_____no_output_____"
],
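[
"A minimal sketch of the \"reuse compiled patterns\" tip above (the pattern and names are ours, not from the book): compiling once with ```re.compile``` and calling the compiled object avoids re-running the compilation step on every call, and a specific, anchored pattern also keeps the engine from backtracking more than necessary.\n\n```python\nimport re\n\n# compiled once at import time, reused for every call\nPHONE = re.compile(r\"\\b9?[ -]?\\d{4}[ -]?\\d{4}\\b\")\n\ndef find_phones(line):\n    # return every phone-like substring in one line of text\n    return PHONE.findall(line)\n```",
"_____no_output_____"
],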
[
"## References ",
"_____no_output_____"
],
[
"Book:\n- Mastering Python Regular Expressions (PACKT) - by: Félix López Víctor Romero\n\nLinks:\n- https://developers.google.com/edu/python/regular-expressions",
"_____no_output_____"
]
]
] |
[
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
]
] |
4a66428180b9bda003f07d5b2a59ccf794a3e4a0
| 14,797 |
ipynb
|
Jupyter Notebook
|
bronze/.ipynb_checkpoints/B36_Quantum_Tomography-checkpoint.ipynb
|
dilyaraahmetshina/quantum_computings
|
a618bae55def65b17974f3ad402ce27817f91842
|
[
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null |
bronze/.ipynb_checkpoints/B36_Quantum_Tomography-checkpoint.ipynb
|
dilyaraahmetshina/quantum_computings
|
a618bae55def65b17974f3ad402ce27817f91842
|
[
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null |
bronze/.ipynb_checkpoints/B36_Quantum_Tomography-checkpoint.ipynb
|
dilyaraahmetshina/quantum_computings
|
a618bae55def65b17974f3ad402ce27817f91842
|
[
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | 38.634465 | 315 | 0.58424 |
[
[
[
"<table> <tr>\n <td style=\"background-color:#ffffff;\">\n <a href=\"http://qworld.lu.lv\" target=\"_blank\"><img src=\"..\\images\\qworld.jpg\" width=\"25%\" align=\"left\"> </a></td>\n <td style=\"background-color:#ffffff;vertical-align:bottom;text-align:right;\">\n prepared by <a href=\"http://abu.lu.lv\" target=\"_blank\">Abuzer Yakaryilmaz</a> (<a href=\"http://qworld.lu.lv/index.php/qlatvia/\" target=\"_blank\">QLatvia</a>)\n </td> \n</tr></table>",
"_____no_output_____"
],
[
"<table width=\"100%\"><tr><td style=\"color:#bbbbbb;background-color:#ffffff;font-size:11px;font-style:italic;text-align:right;\">This cell contains some macros. If there is a problem with displaying mathematical formulas, please run this cell to load these macros. </td></tr></table>\n$ \\newcommand{\\bra}[1]{\\langle #1|} $\n$ \\newcommand{\\ket}[1]{|#1\\rangle} $\n$ \\newcommand{\\braket}[2]{\\langle #1|#2\\rangle} $\n$ \\newcommand{\\dot}[2]{ #1 \\cdot #2} $\n$ \\newcommand{\\biginner}[2]{\\left\\langle #1,#2\\right\\rangle} $\n$ \\newcommand{\\mymatrix}[2]{\\left( \\begin{array}{#1} #2\\end{array} \\right)} $\n$ \\newcommand{\\myvector}[1]{\\mymatrix{c}{#1}} $\n$ \\newcommand{\\myrvector}[1]{\\mymatrix{r}{#1}} $\n$ \\newcommand{\\mypar}[1]{\\left( #1 \\right)} $\n$ \\newcommand{\\mybigpar}[1]{ \\Big( #1 \\Big)} $\n$ \\newcommand{\\sqrttwo}{\\frac{1}{\\sqrt{2}}} $\n$ \\newcommand{\\dsqrttwo}{\\dfrac{1}{\\sqrt{2}}} $\n$ \\newcommand{\\onehalf}{\\frac{1}{2}} $\n$ \\newcommand{\\donehalf}{\\dfrac{1}{2}} $\n$ \\newcommand{\\hadamard}{ \\mymatrix{rr}{ \\sqrttwo & \\sqrttwo \\\\ \\sqrttwo & -\\sqrttwo }} $\n$ \\newcommand{\\vzero}{\\myvector{1\\\\0}} $\n$ \\newcommand{\\vone}{\\myvector{0\\\\1}} $\n$ \\newcommand{\\stateplus}{\\myvector{ \\sqrttwo \\\\ \\sqrttwo } } $\n$ \\newcommand{\\stateminus}{ \\myrvector{ \\sqrttwo \\\\ -\\sqrttwo } } $\n$ \\newcommand{\\myarray}[2]{ \\begin{array}{#1}#2\\end{array}} $\n$ \\newcommand{\\X}{ \\mymatrix{cc}{0 & 1 \\\\ 1 & 0} } $\n$ \\newcommand{\\Z}{ \\mymatrix{rr}{1 & 0 \\\\ 0 & -1} } $\n$ \\newcommand{\\Htwo}{ \\mymatrix{rrrr}{ \\frac{1}{2} & \\frac{1}{2} & \\frac{1}{2} & \\frac{1}{2} \\\\ \\frac{1}{2} & -\\frac{1}{2} & \\frac{1}{2} & -\\frac{1}{2} \\\\ \\frac{1}{2} & \\frac{1}{2} & -\\frac{1}{2} & -\\frac{1}{2} \\\\ \\frac{1}{2} & -\\frac{1}{2} & -\\frac{1}{2} & \\frac{1}{2} } } $\n$ \\newcommand{\\CNOT}{ \\mymatrix{cccc}{1 & 0 & 0 & 0 \\\\ 0 & 1 & 0 & 0 \\\\ 0 & 0 & 0 & 1 \\\\ 0 & 0 & 1 & 0} } $\n$ \\newcommand{\\norm}[1]{ \\left\\lVert #1 \\right\\rVert } $\n$ \\newcommand{\\pstate}[1]{ \\lceil \\mspace{-1mu} #1 \\mspace{-1.5mu} \\rfloor } $",
"_____no_output_____"
],
[
"<h2>Quantum Tomography</h2>\n\nWe start with initializing a qubit with an arbitrary state by using a rotation.",
"_____no_output_____"
],
[
"<h3> Initialize a qubit with an arbitrary state </h3>\n\n",
"_____no_output_____"
],
[
"We can specify a (real-valued) quantum state by its angle ranged from 0 to $ 2\\pi $ radian.\n\nIf $ \\theta $ is our angle, then our quantum state is $ \\ket{v} = \\myvector{\\cos \\theta \\\\ \\sin \\theta} $.\n\n<b> How can we set a qubit to an arbitrary quantum state when started in state $ \\ket{0} $?</b>\n\nWe can use a rotation operator. Rotations preserve the lengths of vectors, and so they are quantum operators.\n\nIn qiskit, ry-gate can be used for rotation in 2-dimensional real-valued plane.",
"_____no_output_____"
],
[
"<a id=\"remark\"></a>\n<h3> Technical remark</h3>\n \nEven though, we focus on only real-valued quantum systems in this tutorial, the quantum state of a qubit is represented by 2-dimensional complex-valued vector in general. To visually represent a complex number, we use two dimensions. So, to visually represent the state of a qubit, we use four dimensions. \n \nOn the other hand, we can still visualize any state of a qubit by using certain mapping from four dimensions to three dimensions. This representation is called as <i>Bloch sphere</i>. \n\nThe rotation operators over a single (complex-valued) qubit are defined on Bloch sphere. The names of gates \"x\", \"y\", or \"z\" refer to the axes on Bloch sphere. When we focus on real-valued qubit, then we should be careful about the parameter(s) that a gate takes. \n\n<i>In qiskit, ry-gate makes a rotation around $y$-axis with the given angle, say $\\theta$, on Bloch sphere. This refers to a rotation in our real-valued $\\ket{0}$-$\\ket{1}$ plane with angle $ \\frac{\\theta}{2} $. Therefore, <b>we should provide the twice of the desired angle in this tutorial.</b></i>",
"_____no_output_____"
],
[
"<h3> Rotations with ry-gate </h3>\n\nThe ry-gate is used for rotation in 2-dimensional real-valued plane.\n\nIf our angle is $ \\theta $ radians, then we pass $ 2 \\theta $ radians as the parameter to ry-gate.\n\nThen ry-gate implements the rotation with angle $\\theta$. \n\nThe default direction of a rotation by ry-gate is counterclockwise.\n\n mycircuit.ry(2*angle_of_rotation,quantum_register)",
"_____no_output_____"
]
],
[
[
"from qiskit import QuantumRegister, ClassicalRegister, QuantumCircuit, execute, Aer\nfrom math import pi\n\n# we define a quantum circuit with one qubit and one bit\nqreg1 = QuantumRegister(1) # quantum register with a single qubit\ncreg1 = ClassicalRegister(1) # classical register with a single bit\nmycircuit1 = QuantumCircuit(qreg1,creg1) # quantum circuit with quantum and classical registers\n\n# angle of rotation in radian\nrotation_angle = 2*pi/3\n\n# rotate the qubit with rotation_angle\nmycircuit1.ry(2*rotation_angle,qreg1[0]) \n\n# measure the qubit\nmycircuit1.measure(qreg1,creg1)",
"_____no_output_____"
],
[
"# draw the circuit\nmycircuit1.draw(output='mpl')",
"_____no_output_____"
],
[
"# execute the program 1000 times\njob = execute(mycircuit1,Aer.get_backend('qasm_simulator'),shots=1000)\n\n# print the results\ncounts = job.result().get_counts(mycircuit1)\nprint(counts) # counts is a dictionary",
"_____no_output_____"
],
[
"from math import sin,cos\n\n# the quantum state\nquantum_state = [ cos(rotation_angle) , sin (rotation_angle) ]\n\nthe_expected_number_of_zeros = 1000*cos(rotation_angle)**2\nthe_expected_number_of_ones = 1000*sin(rotation_angle)**2\n\n# expected results\nprint(\"The expected value of observing '0' is\",round(the_expected_number_of_zeros,4))\nprint(\"The expected value of observing '1' is\",round(the_expected_number_of_ones,4))",
"_____no_output_____"
],
[
"# draw the quantum state\n\n%run qlatvia.py\n\ndraw_qubit()\n\ndraw_quantum_state(quantum_state[0],quantum_state[1],\"|v>\")",
"_____no_output_____"
]
],
[
[
"<h3> Task 1 </h3>\n\nYou are given 1000 copies of an arbitrary quantum state which lies in the first or second quadrant of the unit circle.\n\nThis quantum state can be represented by an angle $ \\theta \\in [0,180) $.\n\n<i>Please execute the following cell, but do not check the value of $\\theta$.</i>",
"_____no_output_____"
]
],
[
[
"from random import randrange\nfrom math import pi\n \ntheta = randrange(18000)/18000 * pi",
"_____no_output_____"
]
],
[
[
"Your task is to guess this quantum state by writing quantum programs.\n\nWe assume that the quantum state is given to us with the following code. \n\n from qiskit import QuantumRegister, ClassicalRegister, QuantumCircuit, execute, Aer\n\n # we define a quantum circuit with one qubit and one bit\n qreg2 = QuantumRegister(1) # quantum register with a single qubit\n creg2 = ClassicalRegister(1) # classical register with a single bit\n circuit2 = QuantumCircuit(qreg2,creg2) # quantum circuit with quantum and classical registers\n\n # rotate the qubit with rotation_angle\n circuit2.ry(2*theta,qreg2[0])\n\nYou should write further codes without using variable $theta$ again.\n\nYou may use measurements or further $ry$-gates.\n\nYou can use 1000 shots in total when executing your quantum programs (you can have more than one program starting with the above code).\n\nAfter your guess, please check the actual value and calculate your error in percentage.",
"_____no_output_____"
]
],
[
[
"# program 1\n\nfrom qiskit import QuantumRegister, ClassicalRegister, QuantumCircuit, execute, Aer\nfrom math import pi\n\n# we define a quantum circuit with one qubit and one bit\nqreg1 = QuantumRegister(1) # quantum register with a single qubit\ncreg1 = ClassicalRegister(1) # classical register with a single bit\ncircuit1 = QuantumCircuit(qreg1,creg1) # quantum circuit with quantum and classical registers\n\n# rotate the qubit with rotation_angle\ncircuit1.ry(2*theta,qreg1[0]) \n\n#\n# your code is here\n#",
"_____no_output_____"
],
[
"# program 2\n\nfrom qiskit import QuantumRegister, ClassicalRegister, QuantumCircuit, execute, Aer\nfrom math import pi\n\n# we define a quantum circuit with one qubit and one bit\nqreg2 = QuantumRegister(1) # quantum register with a single qubit\ncreg2 = ClassicalRegister(1) # classical register with a single bit\ncircuit2 = QuantumCircuit(qreg2,creg2) # quantum circuit with quantum and classical registers\n\n# rotate the qubit with rotation_angle\ncircuit2.ry(2*theta,qreg2[0]) \n",
"_____no_output_____"
],
[
"# program 3\n\nfrom qiskit import QuantumRegister, ClassicalRegister, QuantumCircuit, execute, Aer\nfrom math import pi\n\n# we define a quantum circuit with one qubit and one bit\nqreg3 = QuantumRegister(1) # quantum register with a single qubit\ncreg3 = ClassicalRegister(1) # classical register with a single bit\ncircuit3 = QuantumCircuit(qreg3,creg3) # quantum circuit with quantum and classical registers\n\n# rotate the qubit with rotation_angle\ncircuit3.ry(2*theta,qreg3[0]) \n",
"_____no_output_____"
]
],
[
[
"<a href=\"B36_Quantum_Tomography_Solution.ipynb#task1\">click for our solution</a>",
"_____no_output_____"
],
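[
"Before looking at the solution, here is a rough sketch of one possible strategy (ours, not the official one; ```counts_plain``` and ```counts_rotated``` are assumed to be the count dictionaries returned by two 500-shot executions, so the 1000-shot budget is respected). The first program is measured directly, which gives $\\cos^2 \\theta$ and therefore two candidates $\\theta$ and $\\pi - \\theta$; the second program applies one extra ry-rotation before measuring, and the candidate that better predicts this second outcome is kept.\n\n    from math import acos, cos, pi, sqrt\n\n    def guess_theta(counts_plain, counts_rotated, shots=500, extra=pi/4):\n        # counts_plain   : counts of program 1 (only the unknown ry(2*theta))\n        # counts_rotated : counts of program 2 after adding circuit2.ry(2*extra,qreg2[0]) and measuring\n        p0 = counts_plain.get('0', 0) / shots\n        c1 = acos(min(1.0, sqrt(p0)))   # candidate in the first quadrant\n        c2 = pi - c1                    # mirror candidate in the second quadrant\n        p0_rot = counts_rotated.get('0', 0) / shots\n        # keep the candidate whose prediction matches the rotated measurement better\n        return min((c1, c2), key=lambda t: abs(cos(t + extra) ** 2 - p0_rot))",
"_____no_output_____"
],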
[
"<h3> Task 2 [extra] </h3>\n\nIn Task 1, assume that you are given two qubits that are in states $ \\myvector{\\cos \\theta_1 \\\\ \\sin \\theta_1} $ and $ \\myvector{\\cos \\theta_2 \\\\ \\sin \\theta_2} $, where $ \\theta_1,\\theta_2 \\in [0,\\pi) $.\n\nBy following the same assumptions in Task 1, can you approximate $ \\theta_1 $ and $ \\theta_2 $ by using qiskit?\n\nYour circuit should have a quantum register with these two qubits, and so your measurement outcomes will be '00', '01', '10', and '11'.",
"_____no_output_____"
],
[
"<h3> Task 3 (Discussion) </h3>\n\nIf the angle in Task 1 is picked in range $ [0,360) $, then can we determine its quadrant correctly?",
"_____no_output_____"
],
[
"<h3> Global phase </h3>\n\nSuppose that we have a qubit and its state is either $ \\ket{0} $ or $ -\\ket{0} $.\n\nIs there any sequence of one-qubit gates such that we can measuare different results after applying them?\n\nAll one-qubit gates are $ 2 \\times 2 $ matrices, and their application is represented by a single matrix: $ A_n \\cdot \\cdots \\cdot A_2 \\cdot A_1 = A $.\n\nBy linearity, if $ A \\ket{0} = \\ket{u} $, then $ A - \\ket{0} = -\\ket{u} $. Thus, after measurement, the probabilities of observing state $ \\ket{0} $ and state $ \\ket{1} $ are the same. Therefore, we cannot distinguish them.\n\nEven though the states $ \\ket{0} $ and $ -\\ket{0} $ are different mathematically, they are assumed the same from the physical point of view. \n\nThe minus sign in front of $ -\\ket{0} $ is also called as global phase.",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown"
]
] |
4a66458025137c2479e0067dd13131e80f4dd010
| 110,602 |
ipynb
|
Jupyter Notebook
|
16 Textanalyse Verfiefung Teil 2/2.4 Classifying Text.ipynb
|
Tamibobi/kurs_21_22
|
8f4bfc493f3c811762e67dacd29cfb9076592517
|
[
"MIT"
] | null | null | null |
16 Textanalyse Verfiefung Teil 2/2.4 Classifying Text.ipynb
|
Tamibobi/kurs_21_22
|
8f4bfc493f3c811762e67dacd29cfb9076592517
|
[
"MIT"
] | null | null | null |
16 Textanalyse Verfiefung Teil 2/2.4 Classifying Text.ipynb
|
Tamibobi/kurs_21_22
|
8f4bfc493f3c811762e67dacd29cfb9076592517
|
[
"MIT"
] | null | null | null | 53.046523 | 53,171 | 0.547151 |
[
[
[
"# Document classifier \nPraktisch wenn eine Menge Dokumente sortiert werden muss",
"_____no_output_____"
],
[
"## Daten\n- Wir brauchen zuerst daten um unser Modell zu trainieren",
"_____no_output_____"
]
],
[
[
"#!pip3 install -U textblob",
"_____no_output_____"
],
[
"from textblob.classifiers import NaiveBayesClassifier\n\ntrain = [\n ('I love this sandwich.', 'pos'),\n ('this is an amazing place!', 'pos'),\n ('I feel very good about these beers.', 'pos'),\n ('this is my best work.', 'pos'),\n (\"what an awesome view\", 'pos'),\n ('I do not like this restaurant', 'neg'),\n ('I am tired of this stuff.', 'neg'),\n (\"I can't deal with this\", 'neg'),\n ('He is my sworn enemy!', 'neg'),\n ('My boss is horrible.', 'neg')\n]\ntest = [\n ('The beer was good.', 'pos'),\n ('I do not enjoy my job', 'neg'),\n (\"I ain't feeling dandy today.\", 'neg'),\n (\"I feel amazing!\", 'pos'),\n ('Gary is a friend of mine.', 'pos'),\n (\"I can't believe I'm doing this.\", 'neg')\n]",
"_____no_output_____"
]
],
[
[
"## Training",
"_____no_output_____"
]
],
[
[
"a = NaiveBayesClassifier(train)",
"_____no_output_____"
]
],
[
[
"## Test\n- Wie gut performed unser Modell bei Daten die es noch nie gesehen hat?",
"_____no_output_____"
]
],
[
[
"a.accuracy(test)",
"_____no_output_____"
]
],
[
[
"- Zu 80% korrekt, ok für mich :)",
"_____no_output_____"
],
[
"## Features\n- Welche wörter sorgen am meisten dafür dass etwas positiv oder negativ klassifiziert wird?",
"_____no_output_____"
]
],
[
[
"a.show_informative_features(5)",
"Most Informative Features\n contains(an) = False neg : pos = 1.6 : 1.0\n contains(I) = False pos : neg = 1.4 : 1.0\n contains(I) = True neg : pos = 1.4 : 1.0\n contains(He) = False pos : neg = 1.2 : 1.0\n contains(My) = False pos : neg = 1.2 : 1.0\n"
]
],
[
[
"Er ist der meinung wenn \"this\" vorkommt ist es eher positiv, was natürlich quatsch ist, aber das hat er nun mal so gelernt, deswegen braucht ihr gute trainingsdaten. ",
"_____no_output_____"
],
[
"## Klassifizierung",
"_____no_output_____"
]
],
[
[
"a.classify(\"their burgers are amazing\") # \"pos\"",
"_____no_output_____"
],
[
"a.classify(\"I don't like their pizza.\") # \"neg\"",
"_____no_output_____"
],
[
"a.classify(\"I love my job.\")",
"_____no_output_____"
]
],
[
[
"### Klassizierung nach Sätzen",
"_____no_output_____"
]
],
[
[
"from textblob import TextBlob\nblob = TextBlob(\"The beer was amazing. \"\n \"But the hangover was horrible. My boss was not happy.\",\n classifier=a)",
"_____no_output_____"
],
[
"for sentence in blob.sentences:\n print((\"%s (%s)\") % (sentence,sentence.classify()))",
"The beer was amazing. (pos)\nBut the hangover was horrible. (neg)\nMy boss was not happy. (neg)\n"
]
],
[
[
"## Mit schweizer Songtexten Kommentare klassifizieren\n- http://www.falleri.ch",
"_____no_output_____"
]
],
[
[
"import os,glob\nfrom nltk.tokenize import sent_tokenize\nfrom nltk.tokenize import word_tokenize\n\n\nfrom io import open\n\n\ntrain = []\ncountries = [\"schweiz\", \"deutschland\"]\nfor country in countries:\n out = []\n folder_path = 'songtexte/%s' % country \n for filename in glob.glob(os.path.join(folder_path, '*.txt')):\n with open(filename, 'r') as f:\n text = f.read()\n words = word_tokenize(text)\n words=[word.lower() for word in words if word.isalpha()]\n for word in words:\n out.append(word)\n out = set(out)\n for word in out:\n train.append((word,country))\n #print (filename)\n #print (len(text))\ntrain",
"_____no_output_____"
],
[
"#len(train)",
"_____no_output_____"
],
[
"from textblob.classifiers import NaiveBayesClassifier\nc2 = NaiveBayesClassifier(train)",
"_____no_output_____"
],
[
"c2.classify(\"Ich gehe durch den Wald\") # \"deutsch\"",
"_____no_output_____"
],
[
"c2.classify(\"Häsch es guet\") # \"deutsch\"",
"_____no_output_____"
],
[
"c2.classify(\"Ich fahre mit meinem Porsche auf der Autobahn.\")",
"_____no_output_____"
],
[
"c2.show_informative_features(5)",
"Most Informative Features\n contains(am) = True schwei : deutsc = 1.3 : 1.0\n contains(bei) = True schwei : deutsc = 1.3 : 1.0\n contains(das) = True schwei : deutsc = 1.3 : 1.0\n contains(dir) = True schwei : deutsc = 1.3 : 1.0\n contains(du) = True schwei : deutsc = 1.3 : 1.0\n"
]
],
[
[
"### Orakel ",
"_____no_output_____"
],
[
"Ihr könnt natürlich jetzt Euer eigenes Orakel bauen wie hier: - http://home.datacomm.ch/cgi-heeb/dialect/chochi.pl?Hand=Hand&nicht=net&heute=hit&Fenster=Feischter&gestern=gescht&Abend=Abend&gehorchen=folge&Mond=Manat&jeweils=abe&Holzsplitter=Schepfa&Senden=Jetzt+analysieren%21",
"_____no_output_____"
],
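[
"A tiny sketch of such an oracle on top of the classifier trained above (our own wrapper, not part of the original notebook; it simply maps the two training labels to readable names):\n\n```python\ndef oracle(sentence):\n    # c2 was trained above on the labels \"schweiz\" and \"deutschland\"\n    label = c2.classify(sentence)\n    return \"Swiss German\" if label == \"schweiz\" else \"Standard German\"\n\noracle(\"Häsch es guet\")\n```",
"_____no_output_____"
],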
[
"## Hardcore Beispiel mit Film-review daten mit NLTK\n- https://www.nltk.org/book/ch06.html\n- Wir nutzen nur noch die 100 häufigsten Wörter in den Texten und schauen ob sie bei positiv oder negativ vorkommen",
"_____no_output_____"
]
],
[
[
"import random\nimport nltk\nnltk.download(\"movie_reviews\")\nfrom nltk.corpus import movie_reviews\ndocuments = [(list(movie_reviews.words(fileid)), category)\n for category in movie_reviews.categories()\n for fileid in movie_reviews.fileids(category)]\nrandom.shuffle(documents)",
"[nltk_data] Downloading package movie_reviews to\n[nltk_data] /Users/plotti/nltk_data...\n[nltk_data] Package movie_reviews is already up-to-date!\n"
],
[
"len(documents)",
"_____no_output_____"
],
[
"(\" \").join(documents[1][0])",
"_____no_output_____"
],
[
"all_words = nltk.FreqDist(w.lower() for w in movie_reviews.words())\nword_features = list(all_words)[:2000]\nword_features[0:-10]",
"_____no_output_____"
],
[
"all_words",
"_____no_output_____"
],
[
"all_words = nltk.FreqDist(w.lower() for w in movie_reviews.words())\nword_features = list(all_words)[:2000]\n\ndef document_features(document): \n document_words = set(document)\n features = {}\n for word in word_features:\n features['contains({})'.format(word)] = (word in document_words)\n return features",
"_____no_output_____"
],
[
"print(document_features(movie_reviews.words('pos/cv957_8737.txt')))",
"{'contains(,)': True, 'contains(the)': True, 'contains(.)': True, 'contains(a)': True, 'contains(and)': True, 'contains(of)': True, 'contains(to)': True, \"contains(')\": True, 'contains(is)': True, 'contains(in)': True, 'contains(s)': True, 'contains(\")': True, 'contains(it)': True, 'contains(that)': True, 'contains(-)': True, 'contains())': True, 'contains(()': True, 'contains(as)': True, 'contains(with)': True, 'contains(for)': True, 'contains(his)': True, 'contains(this)': True, 'contains(film)': False, 'contains(i)': False, 'contains(he)': True, 'contains(but)': True, 'contains(on)': True, 'contains(are)': True, 'contains(t)': False, 'contains(by)': True, 'contains(be)': True, 'contains(one)': True, 'contains(movie)': True, 'contains(an)': True, 'contains(who)': True, 'contains(not)': True, 'contains(you)': True, 'contains(from)': True, 'contains(at)': False, 'contains(was)': False, 'contains(have)': True, 'contains(they)': True, 'contains(has)': True, 'contains(her)': False, 'contains(all)': True, 'contains(?)': False, 'contains(there)': True, 'contains(like)': True, 'contains(so)': False, 'contains(out)': True, 'contains(about)': True, 'contains(up)': False, 'contains(more)': False, 'contains(what)': True, 'contains(when)': True, 'contains(which)': True, 'contains(or)': False, 'contains(she)': True, 'contains(their)': False, 'contains(:)': True, 'contains(some)': False, 'contains(just)': True, 'contains(can)': False, 'contains(if)': True, 'contains(we)': False, 'contains(him)': True, 'contains(into)': True, 'contains(even)': False, 'contains(only)': True, 'contains(than)': False, 'contains(no)': False, 'contains(good)': False, 'contains(time)': False, 'contains(most)': True, 'contains(its)': False, 'contains(will)': True, 'contains(story)': False, 'contains(would)': False, 'contains(been)': False, 'contains(much)': False, 'contains(character)': False, 'contains(also)': True, 'contains(get)': True, 'contains(other)': True, 'contains(do)': True, 'contains(two)': True, 'contains(well)': True, 'contains(them)': True, 'contains(very)': True, 'contains(characters)': False, 'contains(;)': False, 'contains(first)': False, 'contains(--)': True, 'contains(after)': False, 'contains(see)': False, 'contains(!)': True, 'contains(way)': True, 'contains(because)': False, 'contains(make)': True, 'contains(life)': False, 'contains(off)': False, 'contains(too)': False, 'contains(any)': False, 'contains(does)': False, 'contains(really)': False, 'contains(had)': False, 'contains(while)': True, 'contains(films)': False, 'contains(how)': True, 'contains(plot)': True, 'contains(little)': True, 'contains(where)': True, 'contains(people)': False, 'contains(over)': False, 'contains(could)': False, 'contains(then)': True, 'contains(me)': True, 'contains(scene)': True, 'contains(man)': False, 'contains(bad)': False, 'contains(my)': False, 'contains(never)': True, 'contains(being)': False, 'contains(best)': True, 'contains(these)': False, 'contains(don)': False, 'contains(new)': False, 'contains(doesn)': False, 'contains(scenes)': False, 'contains(many)': True, 'contains(director)': False, 'contains(such)': False, 'contains(know)': False, 'contains(were)': False, 'contains(movies)': True, 'contains(through)': False, 'contains(here)': True, 'contains(action)': True, 'contains(great)': True, 'contains(re)': True, 'contains(another)': False, 'contains(love)': False, 'contains(go)': False, 'contains(made)': False, 'contains(us)': True, 'contains(big)': False, 'contains(end)': False, 'contains(something)': False, 
'contains(back)': False, 'contains(*)': True, 'contains(still)': False, 'contains(world)': True, 'contains(seems)': False, 'contains(work)': False, 'contains(those)': False, 'contains(makes)': False, 'contains(now)': False, 'contains(before)': False, 'contains(however)': True, 'contains(between)': True, 'contains(few)': False, 'contains(/)': False, 'contains(down)': False, 'contains(every)': False, 'contains(though)': False, 'contains(better)': False, 'contains(real)': False, 'contains(audience)': False, 'contains(enough)': False, 'contains(seen)': False, 'contains(take)': False, 'contains(around)': False, 'contains(both)': False, 'contains(going)': False, 'contains(year)': False, 'contains(performance)': False, 'contains(why)': False, 'contains(should)': False, 'contains(role)': False, 'contains(isn)': False, 'contains(same)': True, 'contains(old)': False, 'contains(gets)': True, 'contains(your)': False, 'contains(may)': False, 'contains(things)': True, 'contains(think)': False, 'contains(years)': False, 'contains(last)': False, 'contains(comedy)': True, 'contains(funny)': True, 'contains(actually)': True, 'contains(ve)': False, 'contains(long)': False, 'contains(look)': True, 'contains(almost)': False, 'contains(own)': True, 'contains(thing)': False, 'contains(fact)': False, 'contains(nothing)': False, 'contains(say)': False, 'contains(right)': False, 'contains(john)': False, 'contains(although)': False, 'contains(played)': True, 'contains(find)': False, 'contains(script)': False, 'contains(come)': False, 'contains(ever)': True, 'contains(cast)': False, 'contains(since)': False, 'contains(did)': False, 'contains(star)': False, 'contains(plays)': False, 'contains(young)': False, 'contains(show)': False, 'contains(comes)': False, 'contains(m)': False, 'contains(part)': False, 'contains(original)': False, 'contains(actors)': False, 'contains(screen)': True, 'contains(without)': False, 'contains(again)': False, 'contains(acting)': False, 'contains(three)': False, 'contains(day)': True, 'contains(each)': True, 'contains(point)': False, 'contains(lot)': False, 'contains(least)': True, 'contains(takes)': False, 'contains(guy)': True, 'contains(quite)': False, 'contains(himself)': False, 'contains(away)': False, 'contains(during)': False, 'contains(family)': False, 'contains(effects)': False, 'contains(course)': True, 'contains(goes)': False, 'contains(minutes)': False, 'contains(interesting)': False, 'contains(might)': False, 'contains(far)': False, 'contains(high)': False, 'contains(rather)': False, 'contains(once)': True, 'contains(must)': False, 'contains(anything)': False, 'contains(place)': True, 'contains(set)': False, 'contains(yet)': False, 'contains(watch)': True, 'contains(d)': False, 'contains(making)': True, 'contains(our)': False, 'contains(wife)': True, 'contains(hard)': False, 'contains(always)': False, 'contains(fun)': True, 'contains(didn)': False, 'contains(ll)': False, 'contains(seem)': False, 'contains(special)': False, 'contains(bit)': False, 'contains(times)': False, 'contains(trying)': False, 'contains(hollywood)': False, 'contains(instead)': False, 'contains(give)': False, 'contains(want)': False, 'contains(picture)': False, 'contains(kind)': True, 'contains(american)': False, 'contains(job)': False, 'contains(sense)': False, 'contains(woman)': True, 'contains(home)': False, 'contains(having)': False, 'contains(series)': True, 'contains(actor)': False, 'contains(probably)': False, 'contains(help)': True, 'contains(half)': False, 'contains(along)': True, 'contains(men)': 
False, 'contains(everything)': True, 'contains(pretty)': False, 'contains(becomes)': False, 'contains(sure)': False, 'contains(black)': False, 'contains(together)': False, 'contains(dialogue)': False, 'contains(money)': False, 'contains(become)': False, 'contains(gives)': False, 'contains(given)': False, 'contains(looking)': False, 'contains(whole)': False, 'contains(watching)': False, 'contains(father)': False, 'contains(`)': False, 'contains(feel)': False, 'contains(everyone)': False, 'contains(music)': False, 'contains(wants)': False, 'contains(sex)': False, 'contains(less)': False, 'contains(done)': False, 'contains(horror)': False, 'contains(got)': True, 'contains(death)': False, 'contains(perhaps)': False, 'contains(city)': False, 'contains(next)': False, 'contains(especially)': True, 'contains(play)': False, 'contains(girl)': False, 'contains(mind)': False, 'contains(10)': False, 'contains(moments)': False, 'contains(looks)': True, 'contains(completely)': False, 'contains(2)': False, 'contains(reason)': False, 'contains(mother)': False, 'contains(whose)': False, 'contains(line)': False, 'contains(night)': False, 'contains(human)': False, 'contains(until)': False, 'contains(rest)': False, 'contains(performances)': False, 'contains(different)': False, 'contains(evil)': False, 'contains(small)': False, 'contains(james)': False, 'contains(simply)': False, 'contains(couple)': False, 'contains(put)': False, 'contains(let)': False, 'contains(anyone)': False, 'contains(ending)': False, 'contains(case)': False, 'contains(several)': False, 'contains(dead)': False, 'contains(michael)': False, 'contains(left)': False, 'contains(thought)': False, 'contains(school)': False, 'contains(shows)': False, 'contains(humor)': False, 'contains(true)': False, 'contains(lost)': False, 'contains(written)': False, 'contains(itself)': False, 'contains(friend)': False, 'contains(entire)': False, 'contains(getting)': True, 'contains(town)': False, 'contains(turns)': False, 'contains(soon)': False, 'contains(someone)': False, 'contains(second)': False, 'contains(main)': False, 'contains(stars)': False, 'contains(found)': False, 'contains(use)': False, 'contains(problem)': False, 'contains(friends)': True, 'contains(tv)': False, 'contains(top)': True, 'contains(name)': False, 'contains(begins)': False, 'contains(called)': False, 'contains(based)': False, 'contains(comic)': False, 'contains(david)': False, 'contains(head)': False, 'contains(else)': False, 'contains(idea)': True, 'contains(either)': False, 'contains(wrong)': True, 'contains(unfortunately)': False, 'contains(later)': False, 'contains(final)': False, 'contains(hand)': False, 'contains(alien)': False, 'contains(house)': False, 'contains(group)': False, 'contains(full)': False, 'contains(used)': True, 'contains(tries)': True, 'contains(often)': True, 'contains(against)': False, 'contains(war)': False, 'contains(sequence)': False, 'contains(keep)': False, 'contains(turn)': False, 'contains(playing)': True, 'contains(boy)': False, 'contains(behind)': False, 'contains(named)': False, 'contains(certainly)': False, 'contains(live)': False, 'contains(believe)': False, 'contains(under)': False, 'contains(works)': False, 'contains(relationship)': False, 'contains(face)': False, 'contains(hour)': False, 'contains(run)': False, 'contains(style)': False, 'contains(said)': False, 'contains(despite)': False, 'contains(person)': False, 'contains(finally)': False, 'contains(shot)': False, 'contains(book)': False, 'contains(doing)': False, 'contains(tell)': False, 
'contains(maybe)': False, 'contains(nice)': False, 'contains(son)': False, 'contains(perfect)': False, 'contains(side)': False, 'contains(seeing)': True, 'contains(able)': False, 'contains(finds)': False, 'contains(children)': False, 'contains(days)': False, 'contains(past)': False, 'contains(summer)': False, 'contains(camera)': False, 'contains(won)': False, 'contains(including)': False, 'contains(mr)': False, 'contains(kids)': False, 'contains(lives)': False, 'contains(directed)': False, 'contains(moment)': False, 'contains(game)': False, 'contains(running)': False, 'contains(fight)': True, 'contains(supposed)': False, 'contains(video)': False, 'contains(car)': False, 'contains(matter)': False, 'contains(kevin)': True, 'contains(joe)': False, 'contains(lines)': False, 'contains(worth)': True, 'contains(=)': False, 'contains(daughter)': False, 'contains(earth)': False, 'contains(starts)': False, 'contains(need)': False, 'contains(entertaining)': False, 'contains(white)': False, 'contains(start)': True, 'contains(writer)': False, 'contains(dark)': False, 'contains(short)': False, 'contains(self)': False, 'contains(worst)': False, 'contains(nearly)': False, 'contains(opening)': False, 'contains(try)': False, 'contains(upon)': False, 'contains(care)': False, 'contains(early)': True, 'contains(violence)': False, 'contains(throughout)': False, 'contains(team)': False, 'contains(production)': False, 'contains(example)': False, 'contains(beautiful)': False, 'contains(title)': False, 'contains(exactly)': False, 'contains(jack)': False, 'contains(review)': False, 'contains(major)': False, 'contains(drama)': False, 'contains(&)': False, 'contains(problems)': True, 'contains(sequences)': False, 'contains(obvious)': False, 'contains(version)': False, 'contains(screenplay)': False, 'contains(known)': True, 'contains(killer)': False, 'contains(wasn)': False, 'contains(robert)': False, 'contains(disney)': False, 'contains(already)': False, 'contains(close)': False, 'contains(classic)': False, 'contains(others)': True, 'contains(hit)': False, 'contains(kill)': False, 'contains(deep)': True, 'contains(five)': False, 'contains(order)': False, 'contains(act)': False, 'contains(simple)': False, 'contains(fine)': False, 'contains(themselves)': False, 'contains(heart)': False, 'contains(roles)': False, 'contains(jackie)': True, 'contains(direction)': False, 'contains(eyes)': False, 'contains(four)': False, 'contains(question)': False, 'contains(sort)': False, 'contains(sometimes)': False, 'contains(knows)': False, 'contains(supporting)': False, 'contains(coming)': False, 'contains(voice)': False, 'contains(women)': False, 'contains(truly)': False, 'contains(save)': False, 'contains(jokes)': False, 'contains(computer)': False, 'contains(child)': False, 'contains(o)': False, 'contains(boring)': False, 'contains(tom)': False, 'contains(level)': False, 'contains(1)': False, 'contains(body)': False, 'contains(guys)': False, 'contains(genre)': False, 'contains(brother)': False, 'contains(strong)': False, 'contains(stop)': True, 'contains(room)': False, 'contains(space)': False, 'contains(lee)': False, 'contains(ends)': False, 'contains(beginning)': False, 'contains(ship)': False, 'contains(york)': False, 'contains(attempt)': False, 'contains(thriller)': False, 'contains(scream)': True, 'contains(peter)': False, 'contains(aren)': False, 'contains(husband)': False, 'contains(fiction)': False, 'contains(happens)': False, 'contains(hero)': False, 'contains(novel)': False, 'contains(note)': False, 'contains(hope)': 
False, 'contains(king)': False, 'contains(yes)': False, 'contains(says)': False, 'contains(tells)': False, 'contains(quickly)': False, 'contains(romantic)': False, 'contains(dog)': False, 'contains(oscar)': False, 'contains(stupid)': False, 'contains(possible)': False, 'contains(saw)': False, 'contains(lead)': True, 'contains(career)': False, 'contains(murder)': False, 'contains(extremely)': False, 'contains(manages)': False, 'contains(god)': False, 'contains(mostly)': False, 'contains(wonder)': False, 'contains(particularly)': False, 'contains(future)': False, 'contains(fans)': False, 'contains(sound)': False, 'contains(worse)': False, 'contains(piece)': False, 'contains(involving)': False, 'contains(de)': False, 'contains(appears)': False, 'contains(planet)': False, 'contains(paul)': False, 'contains(involved)': False, 'contains(mean)': False, 'contains(none)': False, 'contains(taking)': False, 'contains(hours)': False, 'contains(laugh)': True, 'contains(police)': False, 'contains(sets)': False, 'contains(attention)': False, 'contains(co)': False, 'contains(hell)': False, 'contains(eventually)': False, 'contains(single)': False, 'contains(fall)': False, 'contains(falls)': False, 'contains(material)': False, 'contains(emotional)': False, 'contains(power)': False, 'contains(late)': False, 'contains(lack)': False, 'contains(dr)': False, 'contains(van)': False, 'contains(result)': False, 'contains(elements)': False, 'contains(meet)': False, 'contains(smith)': False, 'contains(science)': False, 'contains(experience)': False, 'contains(bring)': False, 'contains(wild)': False, 'contains(living)': False, 'contains(theater)': False, 'contains(interest)': False, 'contains(leads)': False, 'contains(word)': False, 'contains(feature)': False, 'contains(battle)': False, 'contains(girls)': False, 'contains(alone)': False, 'contains(obviously)': False, 'contains(george)': False, 'contains(within)': False, 'contains(usually)': False, 'contains(enjoy)': False, 'contains(guess)': False, 'contains(among)': True, 'contains(taken)': False, 'contains(feeling)': False, 'contains(laughs)': False, 'contains(aliens)': False, 'contains(talk)': True, 'contains(chance)': False, 'contains(talent)': False, 'contains(3)': False, 'contains(middle)': False, 'contains(number)': False, 'contains(easy)': False, 'contains(across)': False, 'contains(needs)': False, 'contains(attempts)': False, 'contains(happen)': False, 'contains(television)': False, 'contains(chris)': False, 'contains(deal)': False, 'contains(poor)': False, 'contains(form)': False, 'contains(girlfriend)': True, 'contains(viewer)': False, 'contains(release)': False, 'contains(killed)': False, 'contains(forced)': False, 'contains(whether)': False, 'contains(wonderful)': False, 'contains(feels)': False, 'contains(oh)': False, 'contains(tale)': False, 'contains(serious)': False, 'contains(expect)': False, 'contains(except)': False, 'contains(light)': False, 'contains(success)': False, 'contains(features)': True, 'contains(premise)': False, 'contains(happy)': False, 'contains(words)': False, 'contains(leave)': False, 'contains(important)': False, 'contains(meets)': False, 'contains(history)': False, 'contains(giving)': False, 'contains(crew)': False, 'contains(type)': False, 'contains(call)': False, 'contains(turned)': False, 'contains(released)': False, 'contains(parents)': False, 'contains(art)': False, 'contains(impressive)': False, 'contains(mission)': False, 'contains(working)': False, 'contains(seemed)': False, 'contains(score)': False, 'contains(told)': 
False, 'contains(recent)': False, 'contains(robin)': False, 'contains(basically)': False, 'contains(entertainment)': False, 'contains(america)': False, 'contains($)': False, 'contains(surprise)': False, 'contains(apparently)': False, 'contains(easily)': False, 'contains(ryan)': False, 'contains(cool)': False, 'contains(stuff)': False, 'contains(cop)': False, 'contains(change)': False, 'contains(williams)': False, 'contains(crime)': False, 'contains(office)': False, 'contains(parts)': False, 'contains(somehow)': False, 'contains(sequel)': False, 'contains(william)': False, 'contains(cut)': False, 'contains(die)': False, 'contains(jones)': False, 'contains(credits)': False, 'contains(batman)': False, 'contains(suspense)': False, 'contains(brings)': False, 'contains(events)': False, 'contains(reality)': False, 'contains(whom)': False, 'contains(local)': False, 'contains(talking)': False, 'contains(difficult)': True, 'contains(using)': False, 'contains(went)': False, 'contains(writing)': False, 'contains(remember)': False, 'contains(near)': False, 'contains(straight)': False, 'contains(hilarious)': True, 'contains(ago)': False, 'contains(certain)': False, 'contains(ben)': False, 'contains(kid)': False, 'contains(wouldn)': False, 'contains(slow)': True, 'contains(blood)': False, 'contains(mystery)': False, 'contains(complete)': False, 'contains(red)': False, 'contains(popular)': False, 'contains(effective)': False, 'contains(am)': False, 'contains(fast)': True, 'contains(flick)': False, 'contains(due)': False, 'contains(runs)': False, 'contains(gone)': False, 'contains(return)': False, 'contains(presence)': False, 'contains(quality)': False, 'contains(dramatic)': False, 'contains(filmmakers)': False, 'contains(age)': False, 'contains(brothers)': False, 'contains(business)': False, 'contains(general)': False, 'contains(rock)': False, 'contains(sexual)': False, 'contains(present)': False, 'contains(surprisingly)': False, 'contains(anyway)': False, 'contains(uses)': False, 'contains(4)': False, 'contains(personal)': False, 'contains(figure)': False, 'contains(smart)': False, 'contains(ways)': False, 'contains(decides)': False, 'contains(annoying)': False, 'contains(begin)': False, 'contains(couldn)': False, 'contains(somewhat)': False, 'contains(shots)': False, 'contains(rich)': False, 'contains(minute)': False, 'contains(law)': False, 'contains(previous)': False, 'contains(jim)': False, 'contains(successful)': False, 'contains(harry)': False, 'contains(water)': False, 'contains(similar)': False, 'contains(absolutely)': False, 'contains(motion)': False, 'contains(former)': False, 'contains(strange)': False, 'contains(came)': False, 'contains(follow)': False, 'contains(read)': False, 'contains(project)': False, 'contains(million)': True, 'contains(secret)': False, 'contains(starring)': False, 'contains(clear)': False, 'contains(familiar)': False, 'contains(romance)': False, 'contains(intelligent)': False, 'contains(third)': True, 'contains(excellent)': False, 'contains(amazing)': False, 'contains(party)': False, 'contains(budget)': False, 'contains(eye)': False, 'contains(actress)': False, 'contains(prison)': False, 'contains(latest)': False, 'contains(means)': True, 'contains(company)': False, 'contains(towards)': False, 'contains(predictable)': False, 'contains(powerful)': False, 'contains(nor)': False, 'contains(bob)': False, 'contains(beyond)': False, 'contains(visual)': False, 'contains(leaves)': False, 'contains(r)': False, 'contains(nature)': False, 'contains(following)': False, 
'contains(villain)': False, 'contains(leaving)': False, 'contains(animated)': False, 'contains(low)': False, 'contains(myself)': False, 'contains(b)': False, 'contains(bill)': False, 'contains(sam)': False, 'contains(filled)': False, 'contains(wars)': False, 'contains(questions)': False, 'contains(cinema)': False, 'contains(message)': False, 'contains(box)': False, 'contains(moving)': True, 'contains(herself)': False, 'contains(country)': False, 'contains(usual)': False, 'contains(martin)': False, 'contains(definitely)': False, 'contains(add)': False, 'contains(large)': False, 'contains(clever)': False, 'contains(create)': False, 'contains(felt)': False, 'contains(stories)': False, 'contains(brilliant)': False, 'contains(ones)': False, 'contains(giant)': False, 'contains(situation)': False, 'contains(murphy)': False, 'contains(break)': False, 'contains(opens)': False, 'contains(scary)': False, 'contains(doubt)': False, 'contains(drug)': True, 'contains(bunch)': False, 'contains(thinking)': False, 'contains(solid)': False, 'contains(effect)': False, 'contains(learn)': False, 'contains(move)': False, 'contains(force)': False, 'contains(potential)': False, 'contains(seriously)': False, 'contains(follows)': False, 'contains(above)': False, 'contains(saying)': False, 'contains(huge)': False, 'contains(class)': False, 'contains(plan)': False, 'contains(agent)': False, 'contains(created)': False, 'contains(unlike)': False, 'contains(pay)': False, 'contains(non)': True, 'contains(married)': False, 'contains(mark)': False, 'contains(sweet)': False, 'contains(perfectly)': False, 'contains(ex)': False, 'contains(realize)': False, 'contains(audiences)': False, 'contains(took)': False, 'contains(decent)': False, 'contains(likely)': False, 'contains(dream)': False, 'contains(view)': False, 'contains(scott)': False, 'contains(subject)': False, 'contains(understand)': False, 'contains(happened)': False, 'contains(enjoyable)': True, 'contains(studio)': False, 'contains(immediately)': False, 'contains(open)': False, 'contains(e)': False, 'contains(points)': False, 'contains(heard)': False, 'contains(viewers)': False, 'contains(cameron)': False, 'contains(truman)': False, 'contains(bruce)': False, 'contains(frank)': False, 'contains(private)': False, 'contains(stay)': False, 'contains(fails)': False, 'contains(impossible)': False, 'contains(cold)': False, 'contains(richard)': False, 'contains(overall)': False, 'contains(merely)': False, 'contains(exciting)': False, 'contains(mess)': False, 'contains(chase)': True, 'contains(free)': False, 'contains(ten)': False, 'contains(neither)': False, 'contains(wanted)': False, 'contains(gun)': True, 'contains(appear)': False, 'contains(carter)': False, 'contains(escape)': False, 'contains(ultimately)': False, 'contains(+)': False, 'contains(fan)': False, 'contains(inside)': False, 'contains(favorite)': False, 'contains(haven)': False, 'contains(modern)': False, 'contains(l)': False, 'contains(wedding)': False, 'contains(stone)': False, 'contains(trek)': False, 'contains(brought)': False, 'contains(trouble)': True, 'contains(otherwise)': False, 'contains(tim)': False, 'contains(5)': False, 'contains(allen)': False, 'contains(bond)': False, 'contains(society)': False, 'contains(liked)': False, 'contains(dumb)': False, 'contains(musical)': False, 'contains(stand)': False, 'contains(political)': False, 'contains(various)': False, 'contains(talented)': False, 'contains(particular)': False, 'contains(west)': False, 'contains(state)': False, 'contains(keeps)': True, 
'contains(english)': False, 'contains(silly)': False, 'contains(u)': False, 'contains(situations)': False, 'contains(park)': False, 'contains(teen)': False, 'contains(rating)': False, 'contains(slightly)': False, 'contains(steve)': False, 'contains(truth)': False, 'contains(air)': False, 'contains(element)': False, 'contains(joke)': False, 'contains(spend)': False, 'contains(key)': True, 'contains(biggest)': False, 'contains(members)': False, 'contains(effort)': False, 'contains(government)': False, 'contains(focus)': False, 'contains(eddie)': False, 'contains(soundtrack)': False, 'contains(hands)': False, 'contains(earlier)': False, 'contains(chan)': True, 'contains(purpose)': False, 'contains(today)': True, 'contains(showing)': False, 'contains(memorable)': False, 'contains(six)': False, 'contains(cannot)': False, 'contains(max)': False, 'contains(offers)': False, 'contains(rated)': False, 'contains(mars)': False, 'contains(heavy)': False, 'contains(totally)': False, 'contains(control)': False, 'contains(credit)': False, 'contains(fi)': False, 'contains(woody)': False, 'contains(ideas)': False, 'contains(sci)': False, 'contains(wait)': False, 'contains(sit)': False, 'contains(female)': False, 'contains(ask)': False, 'contains(waste)': False, 'contains(terrible)': False, 'contains(depth)': False, 'contains(simon)': False, 'contains(aspect)': False, 'contains(list)': False, 'contains(mary)': False, 'contains(sister)': False, 'contains(animation)': False, 'contains(entirely)': False, 'contains(fear)': False, 'contains(steven)': False, 'contains(moves)': False, 'contains(actual)': False, 'contains(army)': False, 'contains(british)': False, 'contains(constantly)': False, 'contains(fire)': False, 'contains(convincing)': False, 'contains(setting)': False, 'contains(gave)': False, 'contains(tension)': False, 'contains(street)': False, 'contains(8)': False, 'contains(brief)': True, 'contains(ridiculous)': False, 'contains(cinematography)': False, 'contains(typical)': False, 'contains(nick)': False, 'contains(screenwriter)': False, 'contains(ability)': False, 'contains(spent)': False, 'contains(quick)': True, 'contains(violent)': False, 'contains(atmosphere)': False, 'contains(subtle)': False, 'contains(expected)': False, 'contains(fairly)': True, 'contains(seven)': False, 'contains(killing)': False, 'contains(tone)': False, 'contains(master)': False, 'contains(disaster)': False, 'contains(lots)': False, 'contains(thinks)': False, 'contains(song)': False, 'contains(cheap)': False, 'contains(suddenly)': False, 'contains(background)': False, 'contains(club)': False, 'contains(willis)': False, 'contains(whatever)': False, 'contains(highly)': False, 'contains(sees)': True, 'contains(complex)': False, 'contains(greatest)': False, 'contains(impact)': False, 'contains(beauty)': False, 'contains(front)': False, 'contains(humans)': False, 'contains(indeed)': False, 'contains(flat)': False, 'contains(grace)': False, 'contains(wrote)': False, 'contains(amusing)': False, 'contains(ii)': False, 'contains(mike)': False, 'contains(further)': False, 'contains(cute)': False, 'contains(dull)': False, 'contains(minor)': False, 'contains(recently)': False, 'contains(hate)': False, 'contains(outside)': False, 'contains(plenty)': False, 'contains(wish)': False, 'contains(godzilla)': False, 'contains(college)': False, 'contains(titanic)': False, 'contains(sounds)': False, 'contains(telling)': False, 'contains(sight)': False, 'contains(double)': False, 'contains(cinematic)': False, 'contains(queen)': False, 
'contains(hold)': False, 'contains(meanwhile)': False, 'contains(awful)': False, 'contains(clearly)': False, 'contains(theme)': False, 'contains(hear)': False, 'contains(x)': False, 'contains(amount)': False, 'contains(baby)': False, 'contains(approach)': False, 'contains(dreams)': False, 'contains(shown)': False, 'contains(island)': False, 'contains(reasons)': False, 'contains(charm)': False, 'contains(miss)': True, 'contains(longer)': False, 'contains(common)': False, 'contains(sean)': False, 'contains(carry)': False, 'contains(believable)': False, 'contains(realistic)': False, 'contains(chemistry)': True, 'contains(possibly)': False, 'contains(casting)': False, 'contains(carrey)': False, 'contains(french)': False, 'contains(trailer)': False, 'contains(tough)': False, 'contains(produced)': False, 'contains(imagine)': False, 'contains(choice)': False, 'contains(ride)': False, 'contains(somewhere)': False, 'contains(hot)': False, 'contains(race)': False, 'contains(road)': False, 'contains(leader)': False, 'contains(thin)': False, 'contains(jerry)': False, 'contains(slowly)': False, 'contains(delivers)': False, 'contains(detective)': False, 'contains(brown)': False, 'contains(jackson)': False, 'contains(member)': False, 'contains(provide)': False, 'contains(president)': False, 'contains(puts)': False, 'contains(asks)': False, 'contains(critics)': False, 'contains(appearance)': False, 'contains(famous)': False, 'contains(okay)': False, 'contains(intelligence)': False, 'contains(energy)': False, 'contains(sent)': False, 'contains(spielberg)': False, 'contains(development)': False, 'contains(etc)': False, 'contains(language)': False, 'contains(blue)': False, 'contains(proves)': False, 'contains(vampire)': False, 'contains(seemingly)': False, 'contains(basic)': False, 'contains(caught)': False, 'contains(decide)': False, 'contains(opportunity)': False, 'contains(incredibly)': False, 'contains(images)': False, 'contains(band)': False, 'contains(j)': False, 'contains(writers)': False, 'contains(knew)': False, 'contains(interested)': False, 'contains(considering)': False, 'contains(boys)': False, 'contains(thanks)': False, 'contains(remains)': False, 'contains(climax)': True, 'contains(event)': False, 'contains(directing)': False, 'contains(conclusion)': False, 'contains(leading)': False, 'contains(ground)': False, 'contains(lies)': False, 'contains(forget)': False, 'contains(alive)': False, 'contains(tarzan)': False, 'contains(century)': False, 'contains(provides)': False, 'contains(trip)': False, 'contains(partner)': False, 'contains(central)': False, 'contains(tarantino)': False, 'contains(period)': False, 'contains(pace)': False, 'contains(yourself)': False, 'contains(worked)': False, 'contains(ready)': False, 'contains(date)': False, 'contains(thus)': False, 'contains(1998)': False, 'contains(terrific)': False, 'contains(write)': False, 'contains(average)': False, 'contains(onto)': False, 'contains(songs)': False, 'contains(occasionally)': False, 'contains(doctor)': False, 'contains(stands)': False, 'contains(hardly)': False, 'contains(monster)': False, 'contains(led)': False, 'contains(mysterious)': False, 'contains(details)': False, 'contains(wasted)': False, 'contains(apart)': False, 'contains(aside)': False, 'contains(store)': False, 'contains(billy)': False, 'contains(boss)': True, 'contains(travolta)': False, 'contains(producer)': False, 'contains(pull)': False, 'contains(consider)': False, 'contains(pictures)': False, 'contains(becoming)': False, 'contains(cage)': False, 
'contains(loud)': False, 'contains(looked)': False, 'contains(officer)': False, 'contains(twenty)': False, 'contains(system)': False, 'contains(contains)': False, 'contains(julia)': False, 'contains(subplot)': False, 'contains(missing)': False, 'contains(personality)': False, 'contains(building)': False, 'contains(learns)': False, 'contains(hong)': True, 'contains(la)': False, 'contains(apartment)': False, 'contains(7)': False, 'contains(bizarre)': False, 'contains(powers)': False, 'contains(flaws)': False, 'contains(catch)': False, 'contains(lawyer)': False, 'contains(shoot)': False, 'contains(student)': False, 'contains(unique)': True, 'contains(000)': False, 'contains(admit)': False, 'contains(concept)': False, 'contains(needed)': False, 'contains(thrown)': False, 'contains(christopher)': False, 'contains(laughing)': False, 'contains(green)': False, 'contains(twists)': False, 'contains(matthew)': False, 'contains(touch)': False, 'contains(waiting)': False, 'contains(victim)': False, 'contains(cover)': False, 'contains(machine)': False, 'contains(danny)': False, 'contains(mention)': False, 'contains(search)': False, 'contains(1997)': False, 'contains(win)': False, 'contains(door)': False, 'contains(manner)': False, 'contains(train)': True, 'contains(saving)': False, 'contains(share)': False, 'contains(image)': False, 'contains(discovers)': False, 'contains(normal)': False, 'contains(cross)': False, 'contains(fox)': False, 'contains(returns)': False, 'contains(adult)': False, 'contains(adds)': False, 'contains(answer)': False, 'contains(adventure)': False, 'contains(lame)': False, 'contains(male)': False, 'contains(odd)': False, 'contains(singer)': False, 'contains(deserves)': False, 'contains(gore)': False, 'contains(states)': False, 'contains(include)': False, 'contains(equally)': False, 'contains(months)': False, 'contains(barely)': False, 'contains(directors)': False, 'contains(introduced)': False, 'contains(fashion)': False, 'contains(social)': False, 'contains(1999)': False, 'contains(news)': False, 'contains(hair)': False, 'contains(dance)': False, 'contains(innocent)': False, 'contains(camp)': False, 'contains(teacher)': False, 'contains(became)': False, 'contains(sad)': False, 'contains(witch)': False, 'contains(includes)': False, 'contains(nights)': False, 'contains(jason)': False, 'contains(julie)': False, 'contains(latter)': False, 'contains(food)': True, 'contains(jennifer)': False, 'contains(land)': False, 'contains(menace)': False, 'contains(rate)': False, 'contains(storyline)': False, 'contains(contact)': False, 'contains(jean)': False, 'contains(elizabeth)': False, 'contains(fellow)': False, 'contains(changes)': False, 'contains(henry)': False, 'contains(hill)': False, 'contains(pulp)': False, 'contains(gay)': False, 'contains(tried)': False, 'contains(surprised)': False, 'contains(literally)': False, 'contains(walk)': False, 'contains(standard)': False, 'contains(90)': False, 'contains(forward)': False, 'contains(wise)': False, 'contains(enjoyed)': False, 'contains(discover)': False, 'contains(pop)': False, 'contains(anderson)': False, 'contains(offer)': False, 'contains(recommend)': False, 'contains(public)': False, 'contains(drive)': False, 'contains(c)': False, 'contains(toy)': False, 'contains(charming)': False, 'contains(fair)': False, 'contains(chinese)': True, 'contains(rescue)': False, 'contains(terms)': False, 'contains(mouth)': False, 'contains(lucas)': False, 'contains(accident)': False, 'contains(dies)': False, 'contains(decided)': False, 'contains(edge)': 
False, 'contains(footage)': False, 'contains(culture)': False, 'contains(weak)': False, 'contains(presented)': False, 'contains(blade)': False, 'contains(younger)': False, 'contains(douglas)': False, 'contains(natural)': False, 'contains(born)': False, 'contains(generally)': False, 'contains(teenage)': False, 'contains(older)': False, 'contains(horrible)': False, 'contains(addition)': False, 'contains(sadly)': False, 'contains(creates)': False, 'contains(disturbing)': False, 'contains(roger)': False, 'contains(detail)': False, 'contains(devil)': False, 'contains(debut)': False, 'contains(track)': False, 'contains(developed)': False, 'contains(week)': False, 'contains(russell)': False, 'contains(attack)': False, 'contains(explain)': False, 'contains(rarely)': False, 'contains(fully)': False, 'contains(prove)': False, 'contains(exception)': False, 'contains(jeff)': False, 'contains(twist)': False, 'contains(gang)': False, 'contains(winning)': False, 'contains(jr)': False, 'contains(species)': False, 'contains(issues)': False, 'contains(fresh)': False, 'contains(rules)': False, 'contains(meaning)': False, 'contains(inspired)': False, 'contains(heroes)': False, 'contains(desperate)': False, 'contains(fighting)': False, 'contains(filmed)': False, 'contains(faces)': False, 'contains(alan)': False, 'contains(bright)': False, 'contains(ass)': True, 'contains(flying)': False, 'contains(kong)': True, 'contains(rush)': False, 'contains(forces)': False, 'contains(charles)': False, 'contains(numerous)': False, 'contains(emotions)': False, 'contains(involves)': True, 'contains(patrick)': False, 'contains(weird)': False, 'contains(apparent)': False, 'contains(information)': False, 'contains(revenge)': False, 'contains(jay)': False, 'contains(toward)': False, 'contains(surprising)': False, 'contains(twice)': False, 'contains(editing)': False, 'contains(calls)': False, 'contains(lose)': False, 'contains(vegas)': False, 'contains(stage)': False, 'contains(intended)': False, 'contains(gags)': False, 'contains(opinion)': False, 'contains(likes)': False, 'contains(crazy)': False, 'contains(owner)': False, 'contains(places)': False, 'contains(pair)': False, 'contains(genuine)': False, 'contains(epic)': False, 'contains(speak)': False, 'contains(throw)': False, 'contains(appeal)': False, 'contains(gibson)': False, 'contains(captain)': False, 'contains(military)': False, 'contains(20)': False, 'contains(blair)': False, 'contains(nowhere)': False, 'contains(length)': False, 'contains(nicely)': False, 'contains(cause)': False, 'contains(pass)': False, 'contains(episode)': False, 'contains(kiss)': False, 'contains(arnold)': True, 'contains(please)': False, 'contains(hasn)': False, 'contains(phone)': False, 'contains(filmmaking)': False, 'contains(formula)': False, 'contains(boyfriend)': False, 'contains(talents)': False, 'contains(creating)': False, 'contains(kelly)': False, 'contains(buy)': False, 'contains(wide)': False, 'contains(fantasy)': False, 'contains(mood)': False, 'contains(heads)': False, 'contains(pathetic)': False, 'contains(lacks)': False, 'contains(loved)': False, 'contains(asked)': False, 'contains(mrs)': False, 'contains(witty)': False, 'contains(shakespeare)': False, 'contains(mulan)': False, 'contains(generation)': False, 'contains(affair)': False, 'contains(pieces)': False, 'contains(task)': False, 'contains(rare)': False, 'contains(kept)': False, 'contains(cameo)': False, 'contains(fascinating)': False, 'contains(ed)': False, 'contains(fbi)': False, 'contains(burton)': False, 
'contains(incredible)': False, 'contains(accent)': False, 'contains(artist)': False, 'contains(superior)': False, 'contains(academy)': False, 'contains(thomas)': False, 'contains(spirit)': False, 'contains(technical)': False, 'contains(confusing)': False, 'contains(poorly)': False, 'contains(target)': False, 'contains(lover)': False, 'contains(woo)': False, 'contains(mentioned)': False, 'contains(theaters)': False, 'contains(plane)': False, 'contains(confused)': False, 'contains(dennis)': False, 'contains(rob)': False, 'contains(appropriate)': False, 'contains(christmas)': False, 'contains(considered)': False, 'contains(legend)': False, 'contains(shame)': False, 'contains(soul)': False, 'contains(matt)': False, 'contains(campbell)': False, 'contains(process)': False, 'contains(bottom)': False, 'contains(sitting)': False, 'contains(brain)': False, 'contains(creepy)': False, 'contains(13)': False, 'contains(forever)': False, 'contains(dude)': False, 'contains(crap)': False, 'contains(superb)': False, 'contains(speech)': False, 'contains(ice)': False, 'contains(journey)': False, 'contains(masterpiece)': False, 'contains(intriguing)': False, 'contains(names)': False, 'contains(pick)': False, 'contains(speaking)': False, 'contains(virtually)': False, 'contains(award)': False, 'contains(worthy)': False, 'contains(marriage)': False, 'contains(deliver)': False, 'contains(cash)': False, 'contains(magic)': False, 'contains(respect)': False, 'contains(product)': False, 'contains(necessary)': False, 'contains(suppose)': False, 'contains(silent)': False, 'contains(pointless)': False, 'contains(station)': False, 'contains(affleck)': False, 'contains(dimensional)': False, 'contains(charlie)': False, 'contains(allows)': False, 'contains(avoid)': False, 'contains(meant)': False, 'contains(cops)': False, 'contains(attitude)': False, 'contains(relationships)': False, 'contains(hits)': False, 'contains(stephen)': False, 'contains(spends)': False, 'contains(relief)': False, 'contains(physical)': True, 'contains(count)': False, 'contains(reviews)': False, 'contains(appreciate)': False, 'contains(cliches)': False, 'contains(holds)': False, 'contains(pure)': False, 'contains(plans)': False, 'contains(limited)': False, 'contains(failed)': False, 'contains(pain)': False, 'contains(impression)': False, 'contains(unless)': False, 'contains(sub)': False, 'contains([)': False, 'contains(total)': False, 'contains(creature)': False, 'contains(viewing)': False, 'contains(loves)': False, 'contains(princess)': False, 'contains(kate)': False, 'contains(rising)': False, 'contains(woods)': False, 'contains(baldwin)': False, 'contains(angry)': False, 'contains(drawn)': False, 'contains(step)': False, 'contains(matrix)': False, 'contains(themes)': False, 'contains(satire)': False, 'contains(arts)': False, 'contains(])': False, 'contains(remake)': False, 'contains(wall)': False, 'contains(moral)': False, 'contains(color)': False, 'contains(ray)': False, 'contains(stuck)': False, 'contains(touching)': False, 'contains(wit)': False, 'contains(tony)': False, 'contains(hanks)': False, 'contains(continues)': False, 'contains(damn)': False, 'contains(nobody)': False, 'contains(cartoon)': False, 'contains(keeping)': False, 'contains(realized)': False, 'contains(criminal)': False, 'contains(unfunny)': False, 'contains(comedic)': False, 'contains(martial)': False, 'contains(disappointing)': False, 'contains(anti)': False, 'contains(graphic)': False, 'contains(stunning)': False, 'contains(actions)': False, 'contains(floor)': False, 
'contains(emotion)': False, 'contains(soldiers)': False, 'contains(edward)': False, 'contains(comedies)': False, 'contains(driver)': False, 'contains(expectations)': False, 'contains(added)': False, 'contains(mad)': False, 'contains(angels)': False, 'contains(shallow)': False, 'contains(suspect)': False, 'contains(humorous)': False, 'contains(phantom)': False, 'contains(appealing)': False, 'contains(device)': False, 'contains(design)': False, 'contains(industry)': False, 'contains(reach)': False, 'contains(fat)': False, 'contains(blame)': False, 'contains(united)': False, 'contains(sign)': False, 'contains(portrayal)': False, 'contains(rocky)': False, 'contains(finale)': False, 'contains(grand)': False, 'contains(opposite)': False, 'contains(hotel)': False, 'contains(match)': False, 'contains(damme)': False, 'contains(speed)': False, 'contains(ok)': False, 'contains(loving)': False, 'contains(field)': True, 'contains(larry)': False, 'contains(urban)': False, 'contains(troopers)': False, 'contains(compared)': False, 'contains(apes)': False, 'contains(rose)': False, 'contains(falling)': False, 'contains(era)': False, 'contains(loses)': False, 'contains(adults)': False, 'contains(managed)': False, 'contains(dad)': False, 'contains(therefore)': False, 'contains(pg)': False, 'contains(results)': False, 'contains(guns)': False, 'contains(radio)': False, 'contains(lady)': False, 'contains(manage)': False, 'contains(spice)': False, 'contains(naked)': False, 'contains(started)': False, 'contains(intense)': False, 'contains(humanity)': False, 'contains(wonderfully)': False, 'contains(slasher)': False, 'contains(bland)': False, 'contains(imagination)': False, 'contains(walking)': False, 'contains(willing)': False, 'contains(horse)': False, 'contains(rent)': False, 'contains(mix)': False, 'contains(generated)': False, 'contains(g)': False, 'contains(utterly)': False, 'contains(scientist)': False, 'contains(washington)': False, 'contains(notice)': False, 'contains(players)': False, 'contains(teenagers)': False, 'contains(moore)': False, 'contains(board)': False, 'contains(price)': False, 'contains(frightening)': False, 'contains(tommy)': False, 'contains(spectacular)': False, 'contains(bored)': False, 'contains(jane)': False, 'contains(join)': False, 'contains(producers)': False, 'contains(johnny)': False, 'contains(zero)': False, 'contains(vampires)': False, 'contains(adaptation)': False, 'contains(dollars)': False, 'contains(parody)': False, 'contains(documentary)': False, 'contains(dvd)': False, 'contains(wayne)': False, 'contains(post)': False, 'contains(exist)': False, 'contains(matters)': False, 'contains(chosen)': False, 'contains(mel)': False, 'contains(attractive)': True, 'contains(plain)': False, 'contains(trust)': False, 'contains(safe)': False, 'contains(reading)': False, 'contains(hoping)': False, 'contains(protagonist)': False, 'contains(feelings)': False, 'contains(fate)': False, 'contains(finding)': False, 'contains(feet)': False, 'contains(visuals)': False, 'contains(spawn)': False, 'contains(compelling)': False, 'contains(hall)': False, 'contains(sympathetic)': False, 'contains(featuring)': False, 'contains(difference)': False, 'contains(professional)': False, 'contains(drugs)': False, 'contains(ford)': False, 'contains(shooting)': False, 'contains(gold)': False, 'contains(patch)': False, 'contains(build)': False, 'contains(boat)': False, 'contains(cruise)': False, 'contains(honest)': False, 'contains(media)': False, 'contains(flicks)': False, 'contains(bug)': False, 
'contains(bringing)': False, 'contains(dangerous)': True, 'contains(watched)': False, 'contains(grant)': False, 'contains(smile)': False, 'contains(plus)': False, 'contains(shouldn)': False, 'contains(decision)': False, 'contains(visually)': False, 'contains(allow)': False, 'contains(starship)': False, 'contains(roberts)': False, 'contains(dying)': False, 'contains(portrayed)': False, 'contains(turning)': False, 'contains(believes)': False, 'contains(changed)': False, 'contains(shock)': False, 'contains(destroy)': False, 'contains(30)': False, 'contains(crowd)': False, 'contains(broken)': False, 'contains(tired)': False, 'contains(fail)': False, 'contains(south)': False, 'contains(died)': False, 'contains(cult)': False, 'contains(fake)': False, 'contains(vincent)': False, 'contains(identity)': False, 'contains(sexy)': False, 'contains(hunt)': False, 'contains(jedi)': False, 'contains(flynt)': False, 'contains(alex)': False, 'contains(engaging)': False, 'contains(serve)': False, 'contains(snake)': False, 'contains(yeah)': False, 'contains(expecting)': False, 'contains(100)': False, 'contains(decade)': False, 'contains(ups)': False, 'contains(constant)': False, 'contains(current)': False, 'contains(survive)': False, 'contains(jimmy)': False, 'contains(buddy)': False, 'contains(send)': False, 'contains(brooks)': False, 'contains(goofy)': False, 'contains(likable)': False, 'contains(humour)': False, 'contains(technology)': False, 'contains(files)': False, 'contains(babe)': False, 'contains(aspects)': False, 'contains(presents)': False, 'contains(kills)': False, 'contains(supposedly)': False, 'contains(eight)': True, 'contains(sandler)': False, 'contains(hospital)': False, 'contains(test)': False, 'contains(hidden)': False, 'contains(brian)': False, 'contains(books)': False, 'contains(promise)': False, 'contains(determined)': False, 'contains(professor)': False, 'contains(welcome)': False, 'contains(pleasure)': False, 'contains(succeeds)': False, 'contains(individual)': False, 'contains(annie)': False, 'contains(mob)': False, 'contains(ted)': False, 'contains(virus)': False, 'contains(content)': False, 'contains(gary)': False, 'contains(direct)': False, 'contains(contrived)': False, 'contains(carpenter)': False, 'contains(scale)': False, 'contains(sick)': False, 'contains(nasty)': False, 'contains(conflict)': False, 'contains(haunting)': False, 'contains(ghost)': False, 'contains(filmmaker)': False, 'contains(japanese)': False, 'contains(helps)': False, 'contains(fare)': False, 'contains(lucky)': False, 'contains(ultimate)': False, 'contains(window)': False, 'contains(support)': False, 'contains(goal)': False, 'contains(provided)': False, 'contains(genius)': False, 'contains(winner)': False, 'contains(taylor)': False, 'contains(fantastic)': False, 'contains(faith)': False, 'contains(lynch)': False, 'contains(fit)': False, 'contains(catherine)': False, 'contains(ms)': False, 'contains(paced)': False, 'contains(breaks)': False, 'contains(al)': False, 'contains(frame)': False, 'contains(travel)': False, 'contains(badly)': False, 'contains(available)': False, 'contains(cares)': False, 'contains(reeves)': False, 'contains(crash)': False, 'contains(driving)': False, 'contains(press)': False, 'contains(seagal)': False, 'contains(amy)': False, 'contains(9)': False, 'contains(headed)': False, 'contains(instance)': False, 'contains(excuse)': False, 'contains(offensive)': False, 'contains(narrative)': False, 'contains(fault)': False, 'contains(bus)': False, 'contains(f)': False, 'contains(extreme)': 
False, 'contains(miller)': False, 'contains(guilty)': False, 'contains(grows)': False, 'contains(overly)': False, 'contains(liners)': False, 'contains(forgotten)': False, 'contains(ahead)': False, 'contains(accept)': False, 'contains(porn)': False, 'contains(directly)': False, 'contains(helen)': False, 'contains(began)': False, 'contains(lord)': False, 'contains(folks)': False, 'contains(mediocre)': False, 'contains(bar)': False, 'contains(surface)': False, 'contains(super)': False, 'contains(failure)': False, 'contains(6)': False, 'contains(acted)': False, 'contains(quiet)': False, 'contains(laughable)': False, 'contains(sheer)': False, 'contains(security)': True, 'contains(emotionally)': False, 'contains(season)': False, 'contains(stuart)': False, 'contains(jail)': True, 'contains(deals)': False, 'contains(cheesy)': False, 'contains(court)': False, 'contains(beach)': False, 'contains(austin)': False, 'contains(model)': False, 'contains(outstanding)': False, 'contains(substance)': False, 'contains(nudity)': False, 'contains(slapstick)': False, 'contains(joan)': False, 'contains(reveal)': False, 'contains(placed)': False, 'contains(check)': False, 'contains(beast)': False, 'contains(hurt)': False, 'contains(bloody)': False, 'contains(acts)': False, 'contains(fame)': False, 'contains(meeting)': False, 'contains(nuclear)': False, 'contains(1996)': False, 'contains(strength)': False, 'contains(center)': False, 'contains(funniest)': False, 'contains(standing)': True, 'contains(damon)': False, 'contains(clich)': False, 'contains(position)': False, 'contains(desire)': False, 'contains(driven)': False, 'contains(seat)': False, 'contains(stock)': False, 'contains(wondering)': True, 'contains(realizes)': False, 'contains(dealing)': False, 'contains(taste)': False, 'contains(routine)': False, 'contains(comparison)': False, 'contains(cinematographer)': False, 'contains(seconds)': False, 'contains(singing)': False, 'contains(gangster)': True, 'contains(responsible)': False, 'contains(football)': False, 'contains(remarkable)': False, 'contains(hunting)': False, 'contains(adams)': False, 'contains(fly)': False, 'contains(suspects)': False, 'contains(treat)': False, 'contains(hopes)': False, 'contains(heaven)': False, 'contains(myers)': False, 'contains(struggle)': False, 'contains(costumes)': False, 'contains(beat)': False, 'contains(happening)': False, 'contains(skills)': False, 'contains(ugly)': False, 'contains(figures)': False, 'contains(thoroughly)': False, 'contains(ill)': False, 'contains(surprises)': False, 'contains(player)': False, 'contains(rival)': False, 'contains(guard)': True, 'contains(anthony)': False, 'contains(strike)': False, 'contains(community)': False, 'contains(streets)': False, 'contains(hopkins)': False, 'contains(ended)': False, 'contains(originally)': False, 'contains(sarah)': False, 'contains(creative)': False, 'contains(characterization)': False, 'contains(thankfully)': False, 'contains(growing)': False, 'contains(sharp)': False, 'contains(williamson)': False, 'contains(eccentric)': False, 'contains(explained)': False, 'contains(hey)': False, 'contains(claire)': False, 'contains(steal)': False, 'contains(inevitable)': False, 'contains(joel)': False, 'contains(core)': False, 'contains(weren)': False, 'contains(sorry)': False, 'contains(built)': False, 'contains(anne)': False, 'contains(breaking)': False, 'contains(villains)': False, 'contains(critic)': False, 'contains(lets)': False, 'contains(visit)': False, 'contains(followed)': False}\n"
],
[
"featuresets = [(document_features(d), c) for (d,c) in documents]\ntrain_set, test_set = featuresets[100:], featuresets[:100]\nb = nltk.NaiveBayesClassifier.train(train_set)",
"_____no_output_____"
],
[
"#document_features(\"a movie with bad actors\".split(\" \"))",
"_____no_output_____"
],
[
"b.classify(document_features(\"a movie with bad actors\".split(\" \")))",
"_____no_output_____"
],
[
"b.classify(document_features(\"an uplifting movie with russel crowe\".split(\" \")))",
"_____no_output_____"
],
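[
"# Illustrative aside: besides the hard label from classify(), the trained\n# NaiveBayesClassifier exposes per-label probabilities through prob_classify();\n# the two labels used here are 'pos' and 'neg' (see the informative-features\n# output below), and the review text is a made-up example.\ndist = b.prob_classify(document_features(\"a movie with bad actors\".split(\" \")))\nprint(dist.max(), dist.prob('pos'), dist.prob('neg'))",
"_____no_output_____"
],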
[
"b.show_most_informative_features(10)",
"Most Informative Features\n contains(outstanding) = True pos : neg = 10.5 : 1.0\n contains(mulan) = True pos : neg = 9.0 : 1.0\n contains(seagal) = True neg : pos = 7.8 : 1.0\n contains(wonderfully) = True pos : neg = 7.3 : 1.0\n contains(lame) = True neg : pos = 6.4 : 1.0\n contains(damon) = True pos : neg = 5.9 : 1.0\n contains(wasted) = True neg : pos = 5.2 : 1.0\n contains(ridiculous) = True neg : pos = 5.2 : 1.0\n contains(waste) = True neg : pos = 4.9 : 1.0\n contains(era) = True pos : neg = 4.9 : 1.0\n"
],
[
"b.accuracy(test_set)",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a664e48f9a9e3ca64b8d628f1d61190a6095045
| 7,805 |
ipynb
|
Jupyter Notebook
|
examples/notebook/contrib/pandigital_numbers.ipynb
|
jdarlay/or-tools
|
a41cf1b50f9e777c273133840968cf50434f3bd5
|
[
"Apache-2.0"
] | 1 |
2022-03-08T22:28:12.000Z
|
2022-03-08T22:28:12.000Z
|
examples/notebook/contrib/pandigital_numbers.ipynb
|
jdarlay/or-tools
|
a41cf1b50f9e777c273133840968cf50434f3bd5
|
[
"Apache-2.0"
] | null | null | null |
examples/notebook/contrib/pandigital_numbers.ipynb
|
jdarlay/or-tools
|
a41cf1b50f9e777c273133840968cf50434f3bd5
|
[
"Apache-2.0"
] | null | null | null | 33.072034 | 256 | 0.558232 |
[
[
[
"empty"
]
]
] |
[
"empty"
] |
[
[
"empty"
]
] |
4a664f2e8d0cac1051dd20033aa42cba2ba4eef5
| 88,949 |
ipynb
|
Jupyter Notebook
|
quests/serverlessml/04_keras/taxifare_model.ipynb
|
SDRLurker/training-data-analyst
|
c4c7778e124eccb54f1a6dc57397a591ff8d6398
|
[
"Apache-2.0"
] | 1 |
2019-07-21T15:13:34.000Z
|
2019-07-21T15:13:34.000Z
|
quests/serverlessml/04_keras/taxifare_model.ipynb
|
SDRLurker/training-data-analyst
|
c4c7778e124eccb54f1a6dc57397a591ff8d6398
|
[
"Apache-2.0"
] | null | null | null |
quests/serverlessml/04_keras/taxifare_model.ipynb
|
SDRLurker/training-data-analyst
|
c4c7778e124eccb54f1a6dc57397a591ff8d6398
|
[
"Apache-2.0"
] | 1 |
2022-02-27T20:24:23.000Z
|
2022-02-27T20:24:23.000Z
| 188.451271 | 34,116 | 0.874467 |
[
[
[
"# A simple DNN model built in Keras.\n\nLet's start off with the Python imports that we need.",
"_____no_output_____"
]
],
[
[
"import os, json, math\nimport numpy as np\nimport shutil\nimport tensorflow as tf\nprint(tf.__version__)",
"2.0.0-dev20190612\n"
]
],
[
[
"## Locating the CSV files\n\nWe will start with the CSV files that we wrote out in the [first notebook](../01_explore/taxifare.iypnb) of this sequence. Just so you don't have to run the notebook, we saved a copy in ../data",
"_____no_output_____"
]
],
[
[
"!ls -l ../data/*.csv",
"-rw-r--r-- 1 jupyter jupyter 126266 Jun 3 15:48 ../data/taxi-test.csv\n-rw-r--r-- 1 jupyter jupyter 593612 Jun 3 15:48 ../data/taxi-train.csv\n-rw-r--r-- 1 jupyter jupyter 126833 Jun 3 15:48 ../data/taxi-valid.csv\n"
]
],
[
[
"## Use tf.data to read the CSV files\n\nWe wrote these cells in the [third notebook](../03_tfdata/input_pipeline.ipynb) of this sequence.",
"_____no_output_____"
]
],
[
[
"CSV_COLUMNS = ['fare_amount', 'pickup_datetime',\n 'pickup_longitude', 'pickup_latitude', \n 'dropoff_longitude', 'dropoff_latitude', \n 'passenger_count', 'key']\nLABEL_COLUMN = 'fare_amount'\nDEFAULTS = [[0.0],['na'],[0.0],[0.0],[0.0],[0.0],[0.0],['na']]",
"_____no_output_____"
],
[
"def features_and_labels(row_data):\n for unwanted_col in ['pickup_datetime', 'key']:\n row_data.pop(unwanted_col)\n label = row_data.pop(LABEL_COLUMN)\n return row_data, label # features, label\n\n# load the training data\ndef load_dataset(pattern, batch_size=1, mode=tf.estimator.ModeKeys.EVAL):\n dataset = (tf.data.experimental.make_csv_dataset(pattern, batch_size, CSV_COLUMNS, DEFAULTS)\n .map(features_and_labels) # features, label\n .cache())\n if mode == tf.estimator.ModeKeys.TRAIN:\n dataset = dataset.shuffle(1000).repeat()\n dataset = dataset.prefetch(1) # take advantage of multi-threading; 1=AUTOTUNE\n return dataset",
"_____no_output_____"
],
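[
"# Illustrative sanity check: pull one small batch to confirm the pipeline yields\n# a dict of feature tensors plus a label tensor; batch size 2 is arbitrary and\n# the file pattern is the same one used for training below.\ntempds = load_dataset('../data/taxi-train*', batch_size=2, mode=tf.estimator.ModeKeys.EVAL)\nfor features, label in tempds.take(1):\n    print({k: v.numpy() for k, v in features.items()})\n    print(label.numpy())",
"_____no_output_____"
],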
[
"## Build a simple Keras DNN using its Functional API\ndef rmse(y_true, y_pred):\n return tf.sqrt(tf.reduce_mean(tf.square(y_pred - y_true))) \n\ndef build_dnn_model():\n INPUT_COLS = ['pickup_longitude', 'pickup_latitude', \n 'dropoff_longitude', 'dropoff_latitude', \n 'passenger_count']\n\n # input layer\n inputs = {\n colname : tf.keras.layers.Input(name=colname, shape=(), dtype='float32')\n for colname in INPUT_COLS\n }\n feature_columns = {\n colname : tf.feature_column.numeric_column(colname)\n for colname in INPUT_COLS\n }\n \n # the constructor for DenseFeatures takes a list of numeric columns\n # The Functional API in Keras requires that you specify: LayerConstructor()(inputs)\n dnn_inputs = tf.keras.layers.DenseFeatures(feature_columns.values())(inputs)\n\n # two hidden layers of [32, 8] just in like the BQML DNN\n h1 = tf.keras.layers.Dense(32, activation='relu', name='h1')(dnn_inputs)\n h2 = tf.keras.layers.Dense(8, activation='relu', name='h2')(h1)\n\n # final output is a linear activation because this is regression\n output = tf.keras.layers.Dense(1, activation='linear', name='fare')(h2)\n model = tf.keras.models.Model(inputs, output)\n model.compile(optimizer='adam', loss='mse', metrics=[rmse, 'mse'])\n return model\n\nmodel = build_dnn_model()\nprint(model.summary())",
"Model: \"model\"\n__________________________________________________________________________________________________\nLayer (type) Output Shape Param # Connected to \n==================================================================================================\ndropoff_latitude (InputLayer) [(None,)] 0 \n__________________________________________________________________________________________________\ndropoff_longitude (InputLayer) [(None,)] 0 \n__________________________________________________________________________________________________\npassenger_count (InputLayer) [(None,)] 0 \n__________________________________________________________________________________________________\npickup_latitude (InputLayer) [(None,)] 0 \n__________________________________________________________________________________________________\npickup_longitude (InputLayer) [(None,)] 0 \n__________________________________________________________________________________________________\ndense_features (DenseFeatures) (None, 5) 0 dropoff_latitude[0][0] \n dropoff_longitude[0][0] \n passenger_count[0][0] \n pickup_latitude[0][0] \n pickup_longitude[0][0] \n__________________________________________________________________________________________________\nh1 (Dense) (None, 32) 192 dense_features[0][0] \n__________________________________________________________________________________________________\nh2 (Dense) (None, 8) 264 h1[0][0] \n__________________________________________________________________________________________________\nfare (Dense) (None, 1) 9 h2[0][0] \n==================================================================================================\nTotal params: 465\nTrainable params: 465\nNon-trainable params: 0\n__________________________________________________________________________________________________\nNone\n"
],
[
"tf.keras.utils.plot_model(model, 'dnn_model.png', show_shapes=False, rankdir='LR')",
"_____no_output_____"
]
],
[
[
"## Train model\n\nTo train the model, call model.fit()",
"_____no_output_____"
]
],
[
[
"TRAIN_BATCH_SIZE = 32\nNUM_TRAIN_EXAMPLES = 10000 * 5 # training dataset repeats, so it will wrap around\nNUM_EVALS = 5 # how many times to evaluate\nNUM_EVAL_EXAMPLES = 10000 # enough to get a reasonable sample, but not so much that it slows down\n\ntrainds = load_dataset('../data/taxi-train*', TRAIN_BATCH_SIZE, tf.estimator.ModeKeys.TRAIN)\nevalds = load_dataset('../data/taxi-valid*', 1000, tf.estimator.ModeKeys.EVAL).take(NUM_EVAL_EXAMPLES//1000)\n\nsteps_per_epoch = NUM_TRAIN_EXAMPLES // (TRAIN_BATCH_SIZE * NUM_EVALS)\n\nhistory = model.fit(trainds, \n validation_data=evalds,\n epochs=NUM_EVALS, \n steps_per_epoch=steps_per_epoch)",
"WARNING: Logging before flag parsing goes to stderr.\nW0618 04:33:59.964382 139677162067712 deprecation.py:323] From /home/jupyter/.local/lib/python3.5/site-packages/tensorflow_core/python/data/experimental/ops/readers.py:498: parallel_interleave (from tensorflow.python.data.experimental.ops.interleave_ops) is deprecated and will be removed in a future version.\nInstructions for updating:\nUse `tf.data.Dataset.interleave(map_func, cycle_length, block_length, num_parallel_calls=tf.data.experimental.AUTOTUNE)` instead. If sloppy execution is desired, use `tf.data.Options.experimental_determinstic`.\nW0618 04:33:59.998314 139677162067712 deprecation.py:323] From /home/jupyter/.local/lib/python3.5/site-packages/tensorflow_core/python/data/experimental/ops/readers.py:211: shuffle_and_repeat (from tensorflow.python.data.experimental.ops.shuffle_ops) is deprecated and will be removed in a future version.\nInstructions for updating:\nUse `tf.data.Dataset.shuffle(buffer_size, seed)` followed by `tf.data.Dataset.repeat(count)`. Static tf.data optimizations will take care of using the fused implementation.\n"
],
[
"# plot\nimport matplotlib.pyplot as plt\nnrows = 1\nncols = 2\nfig = plt.figure(figsize=(10, 5))\n\nfor idx, key in enumerate(['loss', 'rmse']):\n ax = fig.add_subplot(nrows, ncols, idx+1)\n plt.plot(history.history[key])\n plt.plot(history.history['val_{}'.format(key)])\n plt.title('model {}'.format(key))\n plt.ylabel(key)\n plt.xlabel('epoch')\n plt.legend(['train', 'validation'], loc='upper left');",
"_____no_output_____"
]
],
[
[
"## Predict with model\n\nThis is how you'd predict with this model. ",
"_____no_output_____"
]
],
[
[
"model.predict({\n 'pickup_longitude': tf.convert_to_tensor([-73.982683]),\n 'pickup_latitude': tf.convert_to_tensor([40.742104]),\n 'dropoff_longitude': tf.convert_to_tensor([-73.983766]),\n 'dropoff_latitude': tf.convert_to_tensor([40.755174]),\n 'passenger_count': tf.convert_to_tensor([3.0]), \n})",
"_____no_output_____"
]
],
[
[
"Of course, this is not realistic, because we can't expect client code to have a model object in memory. We'll have to export our model to a file, and expect client code to instantiate the model from that exported file.",
"_____no_output_____"
],
[
"## Export model\n\nLet's export the model to a TensorFlow SavedModel format. Once we have a model in this format, we have lots of ways to \"serve\" the model, from a web application, from JavaScript, from mobile applications, etc.",
"_____no_output_____"
]
],
[
[
"# This doesn't work yet.\nshutil.rmtree('./export/savedmodel', ignore_errors=True)\ntf.keras.experimental.export_saved_model(model, './export/savedmodel')\n\n# Recreate the exact same model\nnew_model = tf.keras.experimental.load_from_saved_model('./export/savedmodel')\n\n# try predicting with this model\nnew_model.predict({\n 'pickup_longitude': tf.convert_to_tensor([-73.982683]),\n 'pickup_latitude': tf.convert_to_tensor([40.742104]),\n 'dropoff_longitude': tf.convert_to_tensor([-73.983766]),\n 'dropoff_latitude': tf.convert_to_tensor([40.755174]),\n 'passenger_count': tf.convert_to_tensor([3.0]), \n})",
"W0618 04:34:29.956341 139677162067712 export_utils.py:182] Export includes no default signature!\nW0618 04:34:30.360865 139677162067712 export_utils.py:182] Export includes no default signature!\nW0618 04:34:30.782016 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer.beta_1\nW0618 04:34:30.783495 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer.beta_2\nW0618 04:34:30.787230 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer.decay\nW0618 04:34:30.789488 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer.learning_rate\nW0618 04:34:30.791131 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'm' for (root).layer_with_weights-0.kernel\nW0618 04:34:30.792478 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'm' for (root).layer_with_weights-0.bias\nW0618 04:34:30.794119 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'm' for (root).layer_with_weights-1.kernel\nW0618 04:34:30.795123 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'm' for (root).layer_with_weights-1.bias\nW0618 04:34:30.796163 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'm' for (root).layer_with_weights-2.kernel\nW0618 04:34:30.797219 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'm' for (root).layer_with_weights-2.bias\nW0618 04:34:30.798278 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'v' for (root).layer_with_weights-0.kernel\nW0618 04:34:30.799416 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'v' for (root).layer_with_weights-0.bias\nW0618 04:34:30.800268 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'v' for (root).layer_with_weights-1.kernel\nW0618 04:34:30.801210 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'v' for (root).layer_with_weights-1.bias\nW0618 04:34:30.802492 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'v' for (root).layer_with_weights-2.kernel\nW0618 04:34:30.803513 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'v' for (root).layer_with_weights-2.bias\nW0618 04:34:30.804464 139677162067712 util.py:260] A checkpoint was restored (e.g. tf.train.Checkpoint.restore or tf.keras.Model.load_weights) but not all checkpointed values were used. See above for specific issues. Use expect_partial() on the load status object, e.g. tf.train.Checkpoint.restore(...).expect_partial(), to silence these warnings, or use assert_consumed() to make the check explicit. 
See https://www.tensorflow.org/alpha/guide/checkpoints#loading_mechanics for details.\nW0618 04:34:30.833565 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer.beta_1\nW0618 04:34:30.835628 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer.beta_2\nW0618 04:34:30.838559 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer.decay\nW0618 04:34:30.840411 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer.learning_rate\nW0618 04:34:30.842076 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'm' for (root).layer_with_weights-0.kernel\nW0618 04:34:30.843548 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'm' for (root).layer_with_weights-0.bias\nW0618 04:34:30.844913 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'm' for (root).layer_with_weights-1.kernel\nW0618 04:34:30.846393 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'm' for (root).layer_with_weights-1.bias\nW0618 04:34:30.847731 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'm' for (root).layer_with_weights-2.kernel\nW0618 04:34:30.848521 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'm' for (root).layer_with_weights-2.bias\nW0618 04:34:30.849898 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'v' for (root).layer_with_weights-0.kernel\nW0618 04:34:30.851006 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'v' for (root).layer_with_weights-0.bias\nW0618 04:34:30.852557 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'v' for (root).layer_with_weights-1.kernel\nW0618 04:34:30.853659 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'v' for (root).layer_with_weights-1.bias\nW0618 04:34:30.854754 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'v' for (root).layer_with_weights-2.kernel\nW0618 04:34:30.855880 139677162067712 util.py:252] Unresolved object in checkpoint: (root).optimizer's state 'v' for (root).layer_with_weights-2.bias\nW0618 04:34:30.856939 139677162067712 util.py:260] A checkpoint was restored (e.g. tf.train.Checkpoint.restore or tf.keras.Model.load_weights) but not all checkpointed values were used. See above for specific issues. Use expect_partial() on the load status object, e.g. tf.train.Checkpoint.restore(...).expect_partial(), to silence these warnings, or use assert_consumed() to make the check explicit. See https://www.tensorflow.org/alpha/guide/checkpoints#loading_mechanics for details.\n"
]
],
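[
[
"# Alternative export path (sketch): on current TF 2.x releases the experimental\n# export/load calls above are superseded by model.save() / tf.keras.models.load_model();\n# the custom rmse metric has to be passed back in through custom_objects, and the\n# directory name used here is arbitrary.\nshutil.rmtree('./export/savedmodel_v2', ignore_errors=True)\nmodel.save('./export/savedmodel_v2', save_format='tf')\nrestored = tf.keras.models.load_model('./export/savedmodel_v2', custom_objects={'rmse': rmse})\nrestored.predict({\n    'pickup_longitude': tf.convert_to_tensor([-73.982683]),\n    'pickup_latitude': tf.convert_to_tensor([40.742104]),\n    'dropoff_longitude': tf.convert_to_tensor([-73.983766]),\n    'dropoff_latitude': tf.convert_to_tensor([40.755174]),\n    'passenger_count': tf.convert_to_tensor([3.0]),\n})",
"_____no_output_____"
]
],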
[
[
"In the next notebook, we will improve this model through feature engineering.",
"_____no_output_____"
],
[
"Copyright 2019 Google Inc.\nLicensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except in compliance with the License. You may obtain a copy of the License at\nhttp://www.apache.org/licenses/LICENSE-2.0\nUnless required by applicable law or agreed to in writing, software distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.",
"_____no_output_____"
]
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
]
] |
4a6653d782d7bf0ad6a0654d2e80e797cb7dbf7d
| 85,814 |
ipynb
|
Jupyter Notebook
|
cube_viz/Barplot_setup_for_pubs.ipynb
|
D-Barradas/DataViz
|
b042e994dc4bbebc29863c7da617d9158e8e4855
|
[
"MIT"
] | null | null | null |
cube_viz/Barplot_setup_for_pubs.ipynb
|
D-Barradas/DataViz
|
b042e994dc4bbebc29863c7da617d9158e8e4855
|
[
"MIT"
] | null | null | null |
cube_viz/Barplot_setup_for_pubs.ipynb
|
D-Barradas/DataViz
|
b042e994dc4bbebc29863c7da617d9158e8e4855
|
[
"MIT"
] | null | null | null | 177.668737 | 28,348 | 0.878004 |
[
[
[
"import pandas as pd \nimport seaborn as sns \nimport matplotlib.pyplot as plt\nsns.set_theme(style=\"whitegrid\")\nsns.set_context(\"paper\")",
"_____no_output_____"
],
[
"my_data = pd.read_csv(\"success_rate_RFC_bm5_normal_docking_2.csv\")",
"_____no_output_____"
],
[
"my_data",
"_____no_output_____"
],
[
"my_data.set_index(\"Scoring\",inplace=True)",
"_____no_output_____"
],
[
"# sns.barplot(data=my_data[my_data[\"Method\"]==\"Pydock\"])\n# sns.barplot(data=my_data[my_data[\"Method\"]==\"Zdock\"])\n# sns.barplot(data=my_data[my_data[\"Method\"]==\"Swardock\"],hue=my_data.index)\nmy_data[my_data[\"Method\"]==\"Pydock\"].plot(kind=\"bar\",color=['#C0C0C0','#A9A9A9','#808080'])",
"_____no_output_____"
],
[
"my_data[my_data[\"Method\"]==\"Zdock\"].plot(kind=\"bar\",color=['#C0C0C0','#A9A9A9','#808080'])",
"_____no_output_____"
],
[
"my_data[my_data[\"Method\"]==\"Swardock\"].plot(xlabel=\" \",kind=\"bar\",color=['#C0C0C0','#A9A9A9','#808080'],rot=0)",
"_____no_output_____"
],
[
"fig, ((ax1, ax2, ax3) ) = plt.subplots(3,1 , sharex=False\n ,sharey=True\n ,figsize=( 5 , 8 )\n# \n )\nmy_data[my_data[\"Method\"]==\"Pydock\"].plot(ax=ax1,title=\"A) Pydock\",\n rot=0 ,\n kind=\"bar\",\n color=['#C0C0C0','#A9A9A9','#808080'],\n xlabel=\"\")\nmy_data[my_data[\"Method\"]==\"Zdock\"].plot(ax=ax2,title=\"B) Zdock\",\n rot=0 ,\n kind=\"bar\",\n color=['#C0C0C0','#A9A9A9','#808080'],\n legend=False,\n xlabel=\"\")\n\n \nmy_data[my_data[\"Method\"]==\"Swardock\"].plot(ax=ax3, title=\"C) Swardock\",\n rot=0,\n kind=\"bar\",\n color=['#C0C0C0','#A9A9A9','#808080'],\n legend=False)\n\nplt.ylabel(\"Percentage\")\nplt.tight_layout()\nplt.savefig(\"../success_rate_CODES_vs_IRAPPA.eps\")",
"The PostScript backend does not support transparency; partially transparent artists will be rendered opaque.\nThe PostScript backend does not support transparency; partially transparent artists will be rendered opaque.\n"
],
[
"my_data = pd.read_csv(\"success_rate_RFC_bm5_normal_docking.csv\")",
"_____no_output_____"
],
[
"my_data.sort_values(\"Top10\", inplace=True)\n# my_data.set_index(\"Scoring\", inplace=True)",
"_____no_output_____"
],
[
"my_data[\"Scoring\"] = ['Pydock',\n 'Zdock',\n 'IRAPPA\\nZdock',\n 'CODES',\n 'Swarmdock',\n 'IRAPPA\\nPydock',\n 'IRAPPA\\nSwardock']",
"_____no_output_____"
],
[
"my_data.set_index(\"Scoring\", inplace=True)",
"_____no_output_____"
],
[
"my_data[\"Top10\"].plot(\n rot=0,\n kind=\"bar\",\n color=['#A9A9A9']\n# legend=False\n )\nplt.ylabel(\"Percentage\")\nplt.tight_layout()\nplt.savefig(\"../success_rate_CODES_combines.eps\")",
"_____no_output_____"
],
[
"# sns.barplot(x=my_data.index,y=\"Top10\",data=my_data)",
"_____no_output_____"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a666aa77d49c6881f2183db7fe35d84bb70577c
| 36,544 |
ipynb
|
Jupyter Notebook
|
analysis/SAE.ipynb
|
someshsingh22/TeamEleven
|
bb5acb612cce2a827f7bba3ff28f95c1bceb18fa
|
[
"MIT"
] | 1 |
2020-12-10T17:52:04.000Z
|
2020-12-10T17:52:04.000Z
|
analysis/SAE.ipynb
|
someshsingh22/TeamEleven
|
bb5acb612cce2a827f7bba3ff28f95c1bceb18fa
|
[
"MIT"
] | null | null | null |
analysis/SAE.ipynb
|
someshsingh22/TeamEleven
|
bb5acb612cce2a827f7bba3ff28f95c1bceb18fa
|
[
"MIT"
] | 1 |
2021-05-09T14:10:49.000Z
|
2021-05-09T14:10:49.000Z
| 38.630021 | 202 | 0.455697 |
[
[
[
"import numpy as np\nimport torch\nimport pandas as pd\nimport json\nimport seaborn as sns\nimport matplotlib.pyplot as plt\nfrom sklearn.preprocessing import LabelEncoder as LE\nimport bisect\nimport torch\nfrom datetime import datetime\nfrom sklearn.model_selection import train_test_split",
"_____no_output_____"
],
[
"!cp -r drive/My\\ Drive/T11 ./T11",
"_____no_output_____"
],
[
"np.random.seed(22)\ntorch.manual_seed(22)",
"_____no_output_____"
],
[
"with open('T11/batsmen.json', 'r') as f:\n batsmen = json.load(f)\nwith open('T11/bowlers.json', 'r') as f:\n bowlers = json.load(f)\nbatsmen = {k: [x for x in v if x[1][1]>=0] for k,v in batsmen.items()}\nbatsmen = {k: sorted(v, key=lambda x : x[0]) for k,v in batsmen.items() if v}\nbowlers = {k: sorted(v, key=lambda x : x[0]) for k,v in bowlers.items() if v}",
"_____no_output_____"
],
[
"def getBatScores(scores):\n #runs, balls, boundaries, contribs, out\n array = []\n for score in scores:\n date = score[0]\n _, runs, balls, fours, sixes, _, contrib = score[1]\n boundaries = fours + sixes * 1.5\n array.append((date, np.array([runs, balls, boundaries, contrib])))\n return array\n\ndef getBowlScores(scores):\n #overs, maidens, runs, wickets, contribs\n array = []\n for score in scores:\n date = score[0]\n overs, maidens, runs, wickets, _, contrib = score[1]\n overs = int(overs) + (overs-int(overs))*10/6\n array.append((date, np.array([overs, maidens, runs, wickets, contrib])))\n return array\n\nbatsmen_scores = {k:getBatScores(v) for k,v in batsmen.items()}\nbowlers_scores = {k:getBowlScores(v) for k,v in bowlers.items()}",
"_____no_output_____"
],
[
"_batsmen_scores = {k:{_v[0]: _v[1] for _v in v} for k,v in batsmen_scores.items()}\n_bowlers_scores = {k:{_v[0]: _v[1] for _v in v} for k,v in bowlers_scores.items()}",
"_____no_output_____"
],
[
"att = pd.read_csv('T11/attributes.csv')\natt['BatHand']=0+(att['Bats'].str.find('eft')>0)\natt['BowlHand']=0+(att['Bowls'].str.find('eft')>0)\natt['BowlType']=0+((att['Bowls'].str.find('ast')>0) | (att['Bowls'].str.find('edium')>0))",
"_____no_output_____"
],
[
"def getBatStats(scores):\n dates, scorelist = [score[0] for score in scores], [score[1] for score in scores]\n scorelist = np.array(scorelist)\n cumscores = np.cumsum(scorelist, axis=0)\n innings = np.arange(1, cumscores.shape[0]+1)\n average = cumscores[:, 0]/innings\n sr = cumscores[:, 0]/(cumscores[:, 1]+1)\n contrib = cumscores[:, 3]/innings\n stats = np.array([innings, average, sr, contrib]).T\n return [datetime.strptime(date, \"%Y-%m-%d\") for date in dates], stats\n\ndef getBowlStats(scores):\n dates, scorelist = [score[0] for score in scores], [score[1] for score in scores]\n scorelist = np.array(scorelist)\n cumscores = np.cumsum(scorelist, axis=0)\n overs = cumscores[:, 0]\n overs = overs.astype('int32')+10/6*(overs - overs.astype('int32'))\n runs = cumscores[:, 2]\n economy = runs/overs\n wickets = cumscores[:, 3]\n average = wickets/(runs+1)\n sr = wickets/overs\n contrib = cumscores[:, 4]/np.arange(1, cumscores.shape[0]+1)\n stats = np.array([overs, average, economy, sr, contrib]).T\n return [datetime.strptime(date, \"%Y-%m-%d\") for date in dates], stats",
"_____no_output_____"
],
[
"batsmen_stats = {key:getBatStats(getBatScores(v)) for key,v in batsmen.items()}\nbowlers_stats = {key:getBowlStats(getBowlScores(v)) for key,v in bowlers.items()}",
"_____no_output_____"
],
[
"with open('T11/scorecard.json', 'r') as f:\n scorecards = json.load(f)\nposition = dict()\nfor code, match in scorecards.items():\n for pos, batsmen in enumerate(match['BATTING1']):\n if batsmen[0] in position:\n position[batsmen[0]].append(pos+1)\n else:\n position[batsmen[0]]=[pos+1]\n for pos, batsmen in enumerate(match['BATTING2']):\n if batsmen[0] in position:\n position[batsmen[0]].append(pos+1)\n else:\n position[batsmen[0]]=[pos+1]\n\nposition = {int(k):max(set(v), key = v.count) for k,v in position.items()}\nfor missing in set(att['Code']) - set(position.keys()):\n position[missing]=0",
"_____no_output_____"
],
[
"with open('T11/region.json','r') as f:\n region = json.load(f)\nwith open('T11/tmap.json','r') as f:\n tmap = json.load(f)",
"_____no_output_____"
],
[
"matches = pd.read_csv('T11/matches.csv')\natt['BatPos']=att['Code'].apply(lambda x : position[x])\nmatches['GroundCode']=matches['GroundCode'].apply(lambda x : region[str(x)])\nmatches=matches[pd.to_datetime(matches['Date'], format='%Y-%m-%d')>\"1990-01-01\"]\ndf_cards = pd.DataFrame(scorecards).transpose()\ndf_cards = df_cards[df_cards.index.astype(int).isin(matches['MatchCode'])]\nmatches = matches[matches['MatchCode'].isin(df_cards.index.astype(int))]",
"_____no_output_____"
],
[
"att=pd.get_dummies(att, columns=['BatPos'])\nle = {\n 'GC' : LE(),\n 'Team' : LE(),\n 'Venue' : LE(),\n }\nle['Team'].fit((matches['Team_1'].tolist())+(matches['Team_2'].tolist()))\nmatches['Team_1']=le['Team'].transform(matches['Team_1'])\nmatches['Team_2']=le['Team'].transform(matches['Team_2'])\nmatches['Venue']=le['Venue'].fit_transform(matches['Venue'])\nmatches['GroundCode']=le['GC'].fit_transform(matches['GroundCode'])\nmatches",
"_____no_output_____"
],
[
"patts = att[['BatHand', 'BowlHand', 'BowlType', 'BatPos_0', 'BatPos_1', 'BatPos_2', 'BatPos_3', 'BatPos_4', 'BatPos_5', 'BatPos_6', 'BatPos_7', 'BatPos_8', 'BatPos_9', 'BatPos_10']].values\npcodes = att['Code'].tolist()\nattdict = dict()\nfor i,pc in enumerate(pcodes):\n attdict[pc]=patts[i]",
"_____no_output_____"
],
[
"df_cards['MatchCode']=df_cards.index.astype(int)\nmatches=matches.sort_values(by='MatchCode')\ndf_cards=df_cards.sort_values(by='MatchCode')\ndf_cards.reset_index(drop=True, inplace=True)\nmatches.reset_index(drop=True, inplace=True)\ndf_cards['BAT2']=le['Team'].transform(df_cards['ORDER'].apply(lambda x : tmap[x[1]]))\ndf_cards['BAT1']=le['Team'].transform(df_cards['ORDER'].apply(lambda x : tmap[x[0]]))\ndf_cards['RUN1']=df_cards['SCORES'].apply(lambda x : x[0])\ndf_cards['RUN2']=df_cards['SCORES'].apply(lambda x : x[1])\ndf_cards['TOSS']=le['Team'].transform(df_cards['TOSS'].apply(lambda x : tmap[x]))\ndf = pd.merge(matches, df_cards)\ndf['PLAYERS1']=df['BATTING1'].apply(lambda x : [y[0] for y in x])\ndf['PLAYERS2']=df['BATTING2'].apply(lambda x : [y[0] for y in x])",
"_____no_output_____"
],
[
"_BAT1, _BAT2, _BOW1, _BOW2 = df['PLAYERS1'].tolist(), df['PLAYERS2'].tolist(), [[_x[0] for _x in x] for x in df['BOWLING1'].tolist()], [[_x[0] for _x in x] for x in df['BOWLING2'].tolist()]\nfor i in range(len(_BAT1)):\n try:\n _BAT1[i].append(list(set(_BOW2[i])-set(_BAT1[i]))[0])\n _BAT2[i].append(list(set(_BOW1[i])-set(_BAT2[i]))[0])\n except:\n pass\ndf['PLAYERS1'], df['PLAYERS2'] = _BAT1, _BAT2\ndf=df[['Date', 'Team_1', 'Team_2', 'Venue', 'GroundCode', 'TOSS', 'BAT1', 'BAT2', 'RUN1', 'RUN2', 'PLAYERS1', 'PLAYERS2']]",
"_____no_output_____"
],
[
"df=df[df['PLAYERS1'].apply(lambda x : len(x)==11) & df['PLAYERS2'].apply(lambda x : len(x)==11)]\ndf.reset_index(drop=True, inplace=True)",
"_____no_output_____"
],
[
"Team_1, Team_2, BAT1, BAT2, BOWL1, BOWL2= [], [], [], [], [], []\nfor t1,t2,b1,b2 in zip(df['Team_1'].tolist(), df['Team_2'].tolist(), df['BAT1'].tolist(), df['BAT2'].tolist()):\n if b1==t1:\n Team_1.append(t1)\n Team_2.append(t2)\n else:\n Team_1.append(t2)\n Team_2.append(t1)\ndf['Team_1']=Team_1\ndf['Team_2']=Team_2\ndf.drop(['BAT1', 'BAT2', 'Venue'],axis=1, inplace=True)",
"_____no_output_____"
],
[
"def getStats(code, date):\n _date = datetime.strptime(date, \"%Y-%m-%d\")\n if code in batsmen_stats:\n i = bisect.bisect_left(batsmen_stats[code][0], _date)-1\n if i == -1:\n bat = np.zeros(4)\n else:\n bat = batsmen_stats[code][1][i]\n else:\n bat = np.zeros(4)\n\n if code in bowlers_stats:\n i = bisect.bisect_left(bowlers_stats[code][0], _date)-1\n if i == -1:\n bowl = np.zeros(5)\n else:\n bowl = bowlers_stats[code][1][i]\n else:\n bowl = np.zeros(5)\n if int(code) in attdict:\n patt = attdict[int(code)]\n else:\n patt = np.zeros(14)\n stats = np.concatenate([bat, bowl, patt])\n return stats\n\ndef getScores(code, date):\n if code in _batsmen_scores and date in _batsmen_scores[code]:\n bat = _batsmen_scores[code][date]\n else:\n bat = np.zeros(4)\n if code in _bowlers_scores and date in _bowlers_scores[code]:\n bowl = _bowlers_scores[code][date]\n else:\n bowl = np.zeros(5)\n return np.concatenate([bat, bowl])",
"_____no_output_____"
],
[
"P1, P2, Dates = df['PLAYERS1'].tolist(), df['PLAYERS2'].tolist(), df['Date'].tolist()\nPStats1, PStats2 = [[getStats(p, date) for p in team] for team,date in zip(P1,Dates)], [[getStats(p, date) for p in team] for team,date in zip(P2,Dates)]\nPScores1, PScores2 = [[getScores(p, date) for p in team] for team,date in zip(P1,Dates)], [[getScores(p, date) for p in team] for team,date in zip(P2,Dates)]",
"_____no_output_____"
],
[
"def getNRR(matchcode):\n card = scorecards[matchcode]\n run1, run2 = card['SCORES']\n overs = sum([int(b[1]) + 10/6*(b[1]-int(b[1])) for b in card['BOWLING2']])\n allout = not (len(card['BATTING2'][-1][1])<2 or ('not' in card['BATTING2'][-1][1]))\n if allout:\n overs=50\n return abs((run1/50) - (run2/overs))\ndf['NRR']=matches['MatchCode'].apply(lambda x : getNRR(str(x)))\ndf['TEAM1WIN']=0\ndf['TEAM1WIN'][df['RUN1']>df['RUN2']]=1\ndf_0=df[df['TEAM1WIN']==0]\ndf_1=df[df['TEAM1WIN']==1]\ndf_0['NRR']=-df_0['NRR']\ndf=(df_0.append(df_1)).sort_index()",
"/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:11: SettingWithCopyWarning: \nA value is trying to be set on a copy of a slice from a DataFrame\n\nSee the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n # This is added back by InteractiveShellApp.init_path()\n/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:14: SettingWithCopyWarning: \nA value is trying to be set on a copy of a slice from a DataFrame.\nTry using .loc[row_indexer,col_indexer] = value instead\n\nSee the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n \n"
],
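The target built above is the absolute net-run-rate margin, |run1/50 - run2/overs|, with the chasing side charged the full 50-over quota if it was bowled out; the same cell then flips the sign whenever Team_1 lost, so the regression target is positive for Team_1 wins. Bowling figures record overs in cricket's overs.balls notation, which is why the fractional part is rescaled by 10/6. A standalone arithmetic check with made-up scores, not a real scorecard:

```python
# 47.3 in a bowling card means 47 overs and 3 balls, i.e. 47.5 true overs.
def to_true_overs(o):
    return int(o) + (10 / 6) * (o - int(o))

print(round(to_true_overs(47.3), 2))  # 47.5

# Margin used as the regression target: |run1/50 - run2/overs_faced|,
# charging the full 50 overs if the chasing side was all out.
run1, run2, all_out = 280, 251, False
overs = 50 if all_out else to_true_overs(47.3)
margin = abs(run1 / 50 - run2 / overs)
print(round(margin, 3))  # ~0.316 runs per over
```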
[
"nPStats1, nPStats2, nPScores1, nPScores2 = np.array(PStats1), np.array(PStats2), np.array(PScores1), np.array(PScores2)",
"_____no_output_____"
],
[
"StatMaxes = np.max(np.concatenate([nPStats1, nPStats2]), axis=(0,1))\ndfStats_N1 = nPStats1/StatMaxes\ndfStats_N2 = nPStats2/StatMaxes\nScoreMaxes = np.max(np.concatenate([nPScores1, nPScores2]), axis=(0,1))\ndfScores_N1 = nPScores1/ScoreMaxes\ndfScores_N2 = nPScores2/ScoreMaxes\nNRRMax = np.max(df['NRR'])\ndf['NRR']=df['NRR']/NRRMax",
"_____no_output_____"
],
[
"nnPStats1 = np.concatenate([dfStats_N1, dfStats_N2],axis=0)\nnnPStats2 = np.concatenate([dfStats_N2, dfStats_N1],axis=0)\nnnPScores1 = np.concatenate([dfScores_N1, dfScores_N2],axis=0)\nnnPScores2 = np.concatenate([dfScores_N2, dfScores_N1],axis=0)\n_NRR = np.concatenate([df['NRR'].values, -df['NRR'].values])",
"_____no_output_____"
],
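The two cells above first normalise every feature by its maximum over both teams and then double the dataset by presenting each match from the opposite side, negating the margin so the target stays consistent with which team is listed first. A toy sketch of that swap-and-negate augmentation, with random arrays standing in for the real player tensors:

```python
import numpy as np

rng = np.random.default_rng(0)

# Stand-ins for the per-team player tensors: (matches, 11 players, features).
team_a = rng.random((4, 11, 3))
team_b = rng.random((4, 11, 3))
nrr = rng.random(4) - 0.5  # signed margin from team A's perspective

# Normalise by per-feature maxima taken over both teams, as the notebook does.
maxes = np.max(np.concatenate([team_a, team_b]), axis=(0, 1))
team_a, team_b = team_a / maxes, team_b / maxes

# Augment: every match appears once from each side, with the target's sign flipped.
x1 = np.concatenate([team_a, team_b], axis=0)
x2 = np.concatenate([team_b, team_a], axis=0)
y = np.concatenate([nrr, -nrr])

print(x1.shape, x2.shape, y.shape)  # (8, 11, 3) (8, 11, 3) (8,)
```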
[
"train_idx, test_idx = train_test_split(np.arange(2*len(df)), test_size=0.1)",
"_____no_output_____"
],
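Because the mirrored copies were concatenated before splitting, `train_test_split` over the 2*len(df) rows can put a match in training while its team-swapped twin lands in the test set, which leaks information into validation. A sketch of a split that keeps each pair together, assuming the layout above where row i and row i + len(df) are the two views of the same match:

```python
import numpy as np
from sklearn.model_selection import train_test_split

n = len(df)
base_train, base_test = train_test_split(np.arange(n), test_size=0.1)

# Keep each match and its mirrored copy (offset by n) in the same fold.
train_idx = np.concatenate([base_train, base_train + n])
test_idx = np.concatenate([base_test, base_test + n])
```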
[
"import torch.nn as nn\nimport torch\nfrom torch import optim\nclass AE(nn.Module):\n def __init__(self, input_shape=12, output_shape=1, hidden=16, dropout=0.2):\n super(AE, self).__init__()\n self.hidden = hidden\n self.input_shape = input_shape\n self.output_shape = output_shape\n self.noise = GaussianNoise(sigma=0.1)\n self.player_encoder = nn.Sequential(\n nn.Linear(input_shape, hidden),\n nn.Tanh(),\n nn.Dropout(dropout),\n nn.Linear(hidden, hidden),\n nn.Tanh(),\n nn.Dropout(dropout),\n )\n\n self.score_regressor = nn.Sequential(\n nn.Linear(hidden, 9),\n nn.Tanh(),\n )\n\n self.decoder = nn.Sequential(\n nn.Linear(hidden, input_shape)\n )\n\n self.team_encoder = nn.Sequential(\n nn.Linear(11*hidden, hidden*4),\n nn.Tanh(),\n nn.Dropout(dropout),\n )\n\n self.nrr_regressor = nn.Sequential(\n nn.Linear(hidden*8, hidden*2),\n nn.Tanh(),\n nn.Dropout(dropout),\n nn.Linear(hidden*2, output_shape),\n nn.Tanh(),\n )\n\n def forward(self, x1, x2):\n encoded1, decoded1, scores1 = [], [], []\n encoded2, decoded2, scores2 = [], [], []\n for i in range(11):\n e1 = self.player_encoder(x1[:,i,:])\n d1 = self.decoder(e1)\n e2 = self.player_encoder(x2[:,i,:])\n d2 = self.decoder(e2)\n noise = (0.1**0.5)*torch.randn(e1.size())\n e1, e2 = e1 + noise, e2 + noise\n scores1.append(self.score_regressor(e1))\n scores2.append(self.score_regressor(e2))\n encoded1.append(e1)\n decoded1.append(d1)\n encoded2.append(e2)\n decoded2.append(d2)\n team1, team2 = self.team_encoder(torch.cat(tuple(encoded1), axis=1)), self.team_encoder(torch.cat(tuple(encoded2), axis=1))\n out = self.nrr_regressor(torch.cat((team1, team2), axis=1))\n decoded=torch.cat(tuple(decoded1 + decoded2), axis=1)\n scores1=torch.cat(tuple(scores1),axis=1)\n scores2=torch.cat(tuple(scores2),axis=1)\n return decoded, out, scores1, scores2",
"_____no_output_____"
],
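The `AE` module above shares one encoder across the 11 players of each side, reconstructs every player's inputs, regresses nine per-player performance values, and pools the player embeddings into team vectors for the NRR head. Below is a quick shape check of the forward pass; the `GaussianNoise` layer referenced in `__init__` is defined elsewhere in the notebook (and unused in `forward`), so a trivial stand-in is included purely so this sketch runs on its own:

```python
import torch
import torch.nn as nn

class GaussianNoise(nn.Module):
    # Stand-in for the notebook's GaussianNoise layer, needed only so AE.__init__ runs here.
    def __init__(self, sigma=0.1):
        super().__init__()
        self.sigma = sigma

    def forward(self, x):
        return x + self.sigma * torch.randn_like(x) if self.training else x

model = AE(input_shape=12, hidden=16, dropout=0.3)
x1 = torch.rand(8, 11, 12)  # 8 matches, 11 players, 12 stats per player
x2 = torch.rand(8, 11, 12)

decoded, nrr, scores1, scores2 = model(x1, x2)
print(decoded.shape)  # torch.Size([8, 264]) -> 2 teams * 11 players * 12 reconstructed stats
print(nrr.shape)      # torch.Size([8, 1])
print(scores1.shape)  # torch.Size([8, 99])  -> 11 players * 9 predicted performance values
```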
[
"model = AE(dropout=0.3) \ncriterion = nn.MSELoss()\nED_Loss_train, NRR_Loss_train, Player_Loss_train = [], [], []\nED_Loss_test, NRR_Loss_test, Player_Loss_test = [], [], [] \noptimizer = optim.RMSprop(model.parameters(), lr=3e-4, )\nepochs = 10000\nfor epoch in range(1,epochs+1): \n model.train()\n inputs1 = torch.FloatTensor(nnPStats1[:,:,:12][train_idx])\n inputs2 = torch.FloatTensor(nnPStats2[:,:,:12][train_idx])\n outputs = torch.FloatTensor(_NRR[train_idx].reshape(-1,1)) \n optimizer.zero_grad()\n decoded, out, scores1, scores2 = model(inputs1, inputs2)\n inp = (inputs1).view(train_idx.shape[0], -1), (inputs2).view(train_idx.shape[0], -1)\n loss1 = criterion(decoded, torch.cat(inp, axis=1)) \n loss2 = criterion(out, outputs)\n loss3 = criterion(scores1, torch.FloatTensor(nnPScores1[train_idx]).view(train_idx.shape[0], -1))\n loss4 = criterion(scores2, torch.FloatTensor(nnPScores2[train_idx]).view(train_idx.shape[0], -1))\n loss = 1e-5*loss1 + 1*loss2 + 1e-3*(loss3 + loss4)\n loss.backward()\n ED_Loss_train.append(loss1.item())\n NRR_Loss_train.append(loss2.item())\n Player_Loss_train.append((loss3.item()+loss4.item())/2)\n optimizer.step()\n if epoch%100==0:\n print(f\"Epoch {epoch}/{epochs}\")\n print(\"Train Losses Decoder: %0.3f NRR: %0.3f Player Performance %0.3f\" % (loss1.item(), loss2.item(), (loss3.item()+loss4.item())/2))\n model.eval()\n inputs1 = torch.FloatTensor(nnPStats1[:,:,:12][test_idx])\n inputs2 = torch.FloatTensor(nnPStats2[:,:,:12][test_idx])\n outputs = torch.FloatTensor(_NRR[test_idx].reshape(-1,1))\n decoded, out, scores1, scores2 = model(inputs1, inputs2)\n inp = (inputs1).view(test_idx.shape[0], -1), (inputs2).view(test_idx.shape[0], -1)\n loss1 = criterion(decoded, torch.cat(inp, axis=1)) \n loss2 = criterion(out, outputs)\n loss3 = criterion(scores1, torch.FloatTensor(nnPScores1[test_idx]).view(test_idx.shape[0], -1))\n loss4 = criterion(scores2, torch.FloatTensor(nnPScores2[test_idx]).view(test_idx.shape[0], -1))\n ED_Loss_test.append(loss1.item())\n print(\"Validation Losses Decoder: %0.3f NRR: %0.3f Player Performance: %0.3f\" % (loss1.item(), loss2.item(), (loss3.item()+loss4.item())/2))\n NRR_Loss_test.append(loss2.item())\n out, outputs = out.detach().numpy(), outputs.detach().numpy()\n Player_Loss_test.append((loss3.item()+loss4.item())/2)\n acc=100*np.sum((out*outputs)>0)/out.shape[0]\n print(\"Val Accuracy: %0.3f\" % acc)\n",
"Epoch 10/10000\nTrain Losses Decoder: 0.126 NRR: 0.020 Player Performance 0.086\nValidation Losses Decoder: 0.116 NRR: 0.013 Player Performance: 0.079\nVal Accuracy: 48.013\nEpoch 20/10000\nTrain Losses Decoder: 0.119 NRR: 0.018 Player Performance 0.079\nValidation Losses Decoder: 0.110 NRR: 0.012 Player Performance: 0.074\nVal Accuracy: 50.497\nEpoch 30/10000\nTrain Losses Decoder: 0.114 NRR: 0.018 Player Performance 0.075\nValidation Losses Decoder: 0.106 NRR: 0.012 Player Performance: 0.070\nVal Accuracy: 51.325\nEpoch 40/10000\nTrain Losses Decoder: 0.110 NRR: 0.017 Player Performance 0.072\nValidation Losses Decoder: 0.102 NRR: 0.012 Player Performance: 0.067\nVal Accuracy: 51.159\nEpoch 50/10000\nTrain Losses Decoder: 0.107 NRR: 0.017 Player Performance 0.069\nValidation Losses Decoder: 0.099 NRR: 0.012 Player Performance: 0.064\nVal Accuracy: 50.828\nEpoch 60/10000\nTrain Losses Decoder: 0.104 NRR: 0.016 Player Performance 0.066\nValidation Losses Decoder: 0.096 NRR: 0.012 Player Performance: 0.062\nVal Accuracy: 51.490\nEpoch 70/10000\nTrain Losses Decoder: 0.102 NRR: 0.016 Player Performance 0.064\nValidation Losses Decoder: 0.094 NRR: 0.012 Player Performance: 0.060\nVal Accuracy: 50.828\nEpoch 80/10000\nTrain Losses Decoder: 0.100 NRR: 0.016 Player Performance 0.062\nValidation Losses Decoder: 0.092 NRR: 0.012 Player Performance: 0.058\nVal Accuracy: 53.311\nEpoch 90/10000\nTrain Losses Decoder: 0.098 NRR: 0.016 Player Performance 0.061\nValidation Losses Decoder: 0.090 NRR: 0.012 Player Performance: 0.057\nVal Accuracy: 47.020\nEpoch 100/10000\nTrain Losses Decoder: 0.096 NRR: 0.016 Player Performance 0.059\nValidation Losses Decoder: 0.088 NRR: 0.012 Player Performance: 0.056\nVal Accuracy: 54.801\nEpoch 110/10000\nTrain Losses Decoder: 0.094 NRR: 0.016 Player Performance 0.058\nValidation Losses Decoder: 0.087 NRR: 0.012 Player Performance: 0.054\nVal Accuracy: 48.344\nEpoch 120/10000\nTrain Losses Decoder: 0.093 NRR: 0.015 Player Performance 0.056\nValidation Losses Decoder: 0.085 NRR: 0.012 Player Performance: 0.053\nVal Accuracy: 53.642\nEpoch 130/10000\nTrain Losses Decoder: 0.091 NRR: 0.015 Player Performance 0.055\nValidation Losses Decoder: 0.084 NRR: 0.012 Player Performance: 0.052\nVal Accuracy: 52.483\nEpoch 140/10000\nTrain Losses Decoder: 0.090 NRR: 0.015 Player Performance 0.054\nValidation Losses Decoder: 0.083 NRR: 0.012 Player Performance: 0.051\nVal Accuracy: 53.974\nEpoch 150/10000\nTrain Losses Decoder: 0.089 NRR: 0.015 Player Performance 0.053\nValidation Losses Decoder: 0.082 NRR: 0.012 Player Performance: 0.050\nVal Accuracy: 52.483\nEpoch 160/10000\nTrain Losses Decoder: 0.087 NRR: 0.015 Player Performance 0.052\nValidation Losses Decoder: 0.080 NRR: 0.012 Player Performance: 0.049\nVal Accuracy: 50.166\n"
],
[
"sns.lineplot(x=np.arange(1,10001), y=ED_Loss_train)\nsns.lineplot(x=np.arange(1,10001,50), y=ED_Loss_test)",
"_____no_output_____"
],
[
"sns.lineplot(x=np.arange(1,10001), y=NRR_Loss_train)\nsns.lineplot(x=np.arange(1,10001,50), y=NRR_Loss_test)",
"_____no_output_____"
],
[
"sns.lineplot(x=np.arange(1,10001), y=Player_Loss_train)\nsns.lineplot(x=np.arange(1,10001,50), y=Player_Loss_test)",
"_____no_output_____"
],
[
"",
"_____no_output_____"
]
]
] |
[
"code"
] |
[
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
4a6671601b248c6f16550e77546bf831456ff7aa
| 6,827 |
ipynb
|
Jupyter Notebook
|
docs/README.ipynb
|
MIOsoft/CaptureFile-Python
|
39094e0e329dcae5a597c6409d785d7c4cbd5c04
|
[
"MIT"
] | null | null | null |
docs/README.ipynb
|
MIOsoft/CaptureFile-Python
|
39094e0e329dcae5a597c6409d785d7c4cbd5c04
|
[
"MIT"
] | null | null | null |
docs/README.ipynb
|
MIOsoft/CaptureFile-Python
|
39094e0e329dcae5a597c6409d785d7c4cbd5c04
|
[
"MIT"
] | null | null | null | 30.07489 | 122 | 0.599385 |
[
[
[
"# CaptureFile - Transactional record logging library\n\n## Overview\n\nCapture files are compressed transactional record logs and by convention use the\nextension \".capture\". Records can be appended but not modified and are\nexplicitly committed to the file.\n\nAny records that are added but not committed will not be visible to other\nprocesses and will be lost if the process that added them stops or otherwise\ncloses the capture file before committing. All records that were added between\ncommits either become available together or, if the commit fails, are discarded\ntogether. This is true even if the file buffers were flushed to disk as the\nnumber of records added between commits grew.\n\nRecords in a capture file are each of arbitrary length and can contain up to 4GB\n(2³² bytes) of binary data.\n\nCapture files can quickly retrieve any record by its sequential record number.\nThis is true even with trillions of records.\n\nMetadata can be attached to and read from a capture file. The current metadata\nreference is replaced by any subsequent metadata writes. A metadata update is\nalso transactional and will be committed together with any records that were\nadded between commits.\n\nConcurrent read access is supported for multiple threads and OS processes.\n\nOnly one writer is permitted at a time.\n\nThis is a pure Python implementation with no dependencies beyond the Python\nstandard library. Development build dependencies are listed in requirements.txt.\n\nClick here for the implementation language independent [internal details of the\ndesign and the data structures\nused](https://github.com/MIOsoft/CaptureFile-Python/blob/master/docs/DESIGN.md).\n\nClick here for a detailed description of the [Python CaptureFile\nAPI](https://github.com/MIOsoft/CaptureFile-Python/blob/master/docs/CaptureFile.CaptureFile.md).\nThe detailed description covers several useful APIs and parameters that are not\ncovered in the Quickstart below.\n\nTo work with capture files visually, you can use the free [MIObdt](https://miosoft.com/miobdt/) application.\n\n## Install\n\n```\npip install CaptureFile\n```\n\n## Quickstart\n\n### Example 1. Creating a new capture file and then adding and committing some records to it.",
"_____no_output_____"
]
],
[
[
"from CaptureFile import CaptureFile\n\n# in the **existing** sibling \"TempTestFiles\" folder create a new empty capture file\ncf = CaptureFile(\"../TempTestFiles/Test.capture\", to_write=True, force_new_empty_file=True)\n\n# add five records to the capture file\ncf.add_record(\"Hey this is my record 1\")\ncf.add_record(\"Hey this is my record 2\")\ncf.add_record(\"Hey this is my record 3\")\ncf.add_record(\"Hey this is my record 4\")\ncf.add_record(\"Hey this is my record 5\")\n\n# commit records to capture file\ncf.commit()\n\nprint(f\"There are {cf.record_count()} records in this capture file.\")\n\n# close the capture file\ncf.close()",
"There are 5 records in this capture file.\n"
]
],
[
[
"### Example 2. Reading a record from the capture file created above.",
"_____no_output_____"
]
],
[
[
"from CaptureFile import CaptureFile\n\n# open existing capture file for reading\ncf = CaptureFile(\"../TempTestFiles/Test.capture\")\n\n# retrieve the second record from the capture file\nrecord = cf.record_at(2)\nprint(record)\n\n# close the capture file\ncf.close()",
"Hey this is my record 2\n"
]
],
[
[
"### Example 3. Opening an existing capture file and then reading a range of records from it.",
"_____no_output_____"
]
],
[
[
"from CaptureFile import CaptureFile\n\n# open existing capture file for reading\ncf = CaptureFile(\"../TempTestFiles/Test.capture\")\n\n# retrieve and print records 2 to 3\nprint(cf[2:4])\n\n# close the capture file\ncf.close()",
"['Hey this is my record 2', 'Hey this is my record 3']\n"
]
],
[
[
"### Example 4. Opening an existing capture file using a context manager and then iterating over all records from it.",
"_____no_output_____"
]
],
[
[
"from CaptureFile import CaptureFile\n\n# open existing capture file for reading using a context manager\n# so no need to close the capture file\nwith CaptureFile(\"../TempTestFiles/Test.capture\") as cf:\n\n #iterate over and print all records\n for record in iter(cf):\n print(record)",
"Hey this is my record 1\nHey this is my record 2\nHey this is my record 3\nHey this is my record 4\nHey this is my record 5\n"
]
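One further illustration, not part of the original README: combining writing and reading under the context manager. It only uses calls shown above (`to_write=True`, `add_record`, `commit`, `record_count`, `record_at`), but treat it as a hedged sketch rather than documented usage:

```python
from CaptureFile import CaptureFile

# Append one more record to the file created in Example 1, then read it back.
with CaptureFile("../TempTestFiles/Test.capture", to_write=True) as cf:
    cf.add_record("Hey this is my record 6")
    cf.commit()
    print(f"Now {cf.record_count()} records; last: {cf.record_at(cf.record_count())}")
```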
]
] |
[
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] |
[
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |