Dataset columns, with dtype and observed value range (for `stringlengths` columns the range refers to string length; `stringclasses` columns have a fixed set of values):

| Column | Dtype | Range |
|---|---|---|
| hexsha | stringlengths | 40 to 40 |
| size | int64 | 6 to 14.9M |
| ext | stringclasses | 1 value |
| lang | stringclasses | 1 value |
| max_stars_repo_path | stringlengths | 6 to 260 |
| max_stars_repo_name | stringlengths | 6 to 119 |
| max_stars_repo_head_hexsha | stringlengths | 40 to 41 |
| max_stars_repo_licenses | list | |
| max_stars_count | int64 | 1 to 191k |
| max_stars_repo_stars_event_min_datetime | stringlengths | 24 to 24 |
| max_stars_repo_stars_event_max_datetime | stringlengths | 24 to 24 |
| max_issues_repo_path | stringlengths | 6 to 260 |
| max_issues_repo_name | stringlengths | 6 to 119 |
| max_issues_repo_head_hexsha | stringlengths | 40 to 41 |
| max_issues_repo_licenses | list | |
| max_issues_count | int64 | 1 to 67k |
| max_issues_repo_issues_event_min_datetime | stringlengths | 24 to 24 |
| max_issues_repo_issues_event_max_datetime | stringlengths | 24 to 24 |
| max_forks_repo_path | stringlengths | 6 to 260 |
| max_forks_repo_name | stringlengths | 6 to 119 |
| max_forks_repo_head_hexsha | stringlengths | 40 to 41 |
| max_forks_repo_licenses | list | |
| max_forks_count | int64 | 1 to 105k |
| max_forks_repo_forks_event_min_datetime | stringlengths | 24 to 24 |
| max_forks_repo_forks_event_max_datetime | stringlengths | 24 to 24 |
| avg_line_length | float64 | 2 to 1.04M |
| max_line_length | int64 | 2 to 11.2M |
| alphanum_fraction | float64 | 0 to 1 |
| cells | list | |
| cell_types | list | |
| cell_type_groups | list | |
Example record (one Jupyter Notebook file):

- hexsha: cbfc1a2ae9575c6e59c84d1f5be284b5ae8f150d
- size: 1,026,267
- ext: ipynb
- lang: Jupyter Notebook
- max_stars_repo_path: CNN using CIFAR50/CNN, Bag of tricks_CIFAR50.ipynb
- max_stars_repo_name: joy6543/Deep-Learning
- max_stars_repo_head_hexsha: 942f208976c6ab734eaee23adf273a06ac748bed
- max_stars_repo_licenses: [ "MIT" ]
- max_stars_count: null
- max_stars_repo_stars_event_min_datetime: null
- max_stars_repo_stars_event_max_datetime: null
- max_issues_repo_path: CNN using CIFAR50/CNN, Bag of tricks_CIFAR50.ipynb
- max_issues_repo_name: joy6543/Deep-Learning
- max_issues_repo_head_hexsha: 942f208976c6ab734eaee23adf273a06ac748bed
- max_issues_repo_licenses: [ "MIT" ]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: CNN using CIFAR50/CNN, Bag of tricks_CIFAR50.ipynb
- max_forks_repo_name: joy6543/Deep-Learning
- max_forks_repo_head_hexsha: 942f208976c6ab734eaee23adf273a06ac748bed
- max_forks_repo_licenses: [ "MIT" ]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null
- avg_line_length: 513,133.5
- max_line_length: 1,026,266
- alphanum_fraction: 0.801162
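The three text statistics at the end of the record are simple functions of the raw file content. The sketch below shows the conventional definitions (mean and maximum line length, fraction of alphanumeric characters); the pipeline that produced this dataset may differ in small details such as newline handling, so treat it as illustrative.

```python
# Illustrative definitions only; the dataset's own pipeline may differ slightly.
def text_stats(text: str):
    lines = text.splitlines() or [""]
    avg_line_length = sum(len(line) for line in lines) / len(lines)
    max_line_length = max(len(line) for line in lines)
    alphanum_fraction = (
        sum(ch.isalnum() for ch in text) / len(text) if text else 0.0
    )
    return avg_line_length, max_line_length, alphanum_fraction
```

For this record, max_line_length (1,026,266) is nearly the full file size (1,026,267): the notebook JSON sits almost entirely on one long line, which is also why avg_line_length is so large.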
[ [ [ "from __future__ import print_function\n\nimport argparse\nimport os\nimport random\nimport torch\nimport torch.nn as nn\nimport torch.nn.parallel\nimport torch.backends.cudnn as cudnn\nimport torch.optim as optim\nimport torch.utils.data\nimport torchvision.datasets as dset\nimport torchvision.transforms as transforms\nimport torchvision.utils as vutils\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib.animation as animation\nfrom IPython.display import HTML\nfrom matplotlib import cm\nimport torchvision\nimport torchvision.datasets as datasets\n\n# Set random seed for reproducibility\nmanualseed = 43\nrandom.seed(manualseed)\ntorch.manual_seed(manualseed)", "_____no_output_____" ], [ "from google.colab import drive\ndrive.mount('/content/gdrive')", "Mounted at /content/gdrive\n" ], [ "!unzip /content/gdrive/MyDrive/cifar50.zip -d /content/gdrive/MyDrive/CIFAR50", "\u001b[1;30;43mStreaming output truncated to the last 5000 lines.\u001b[0m\n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23323.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23431.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23457.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23464.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23507.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23533.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23624.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23707.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23713.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23801.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23814.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23830.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23874.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23902.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_23959.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24002.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24016.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24035.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24139.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24149.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24172.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24310.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24360.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24434.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_2450.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24695.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24818.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24860.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24922.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24925.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24935.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_24958.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/sunflower/train_2550.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_2644.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_2653.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_2662.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_2707.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_2749.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_2751.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_2817.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_2819.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_2856.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_2857.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_2975.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_303.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3041.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3084.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3099.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3112.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3127.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3147.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3151.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3301.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3304.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3390.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3497.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3526.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3683.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3720.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3732.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3748.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3803.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3841.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_3849.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4005.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4049.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4056.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4062.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4073.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4119.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4138.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4163.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4165.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4179.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4194.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_423.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4292.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4311.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4320.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4325.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4326.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4402.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4600.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4805.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4816.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4859.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4889.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4910.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4967.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4976.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_4985.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_511.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5180.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5194.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5232.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5297.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5307.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5408.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5423.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5440.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5509.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5513.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5520.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5610.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_567.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_570.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5705.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5762.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5776.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5784.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5795.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5812.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5832.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5861.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5917.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_5975.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6017.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6142.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6164.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6181.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6248.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6261.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6316.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6326.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6452.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6474.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6479.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_651.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6514.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6525.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6641.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6687.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6692.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6760.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6797.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_699.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_6990.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7031.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7039.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7080.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7094.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7101.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7218.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7289.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7299.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7403.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7419.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7504.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7563.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7579.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7657.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7665.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7687.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_77.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7711.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7909.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7951.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_7973.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8104.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8152.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8184.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8211.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8224.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_825.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8309.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8376.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8381.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8407.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8410.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8431.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8435.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8598.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8696.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_8717.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_9070.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_920.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_9277.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_9287.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_9288.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_9313.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_9417.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_9418.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_9428.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_9488.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_957.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_959.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_9743.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_9773.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_9846.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_9855.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/sunflower/train_9857.jpg \n creating: /content/gdrive/MyDrive/CIFAR50/images/table/\n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1018.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1086.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1091.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1104.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1116.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1142.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1217.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1309.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_136.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1390.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1397.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1416.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1438.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1493.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1515.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1582.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1640.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1659.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1695.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1706.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1750.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/table/test_1755.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1788.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1821.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1916.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1917.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1930.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_1989.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_2089.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_2116.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_2145.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_2268.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_2352.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_2381.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_2486.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_2704.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_2743.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_2787.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_2953.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_2968.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_2986.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_300.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3060.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_321.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3265.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3295.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3315.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3362.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3439.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3455.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3471.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3496.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3500.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3514.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3529.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3557.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3558.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3562.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3594.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3845.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3870.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3912.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3971.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_3991.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4004.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4068.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4082.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4089.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4102.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/table/test_4156.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4160.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4169.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_422.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4298.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4353.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4357.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4385.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4473.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_448.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4531.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4579.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4601.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4634.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4662.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4686.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4688.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4920.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_4957.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_504.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_530.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_553.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_580.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_619.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_634.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_726.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_765.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_786.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_789.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_811.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/test_966.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10003.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10025.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1004.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10130.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1016.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10273.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10308.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10344.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10369.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10380.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10467.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10485.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10536.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10547.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10591.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10662.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/table/train_10664.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10690.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10754.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10825.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10850.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1087.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10873.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10885.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1090.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_10990.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11021.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11171.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11182.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1119.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11221.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11261.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11306.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11425.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11506.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11597.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11612.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11648.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11654.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1167.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11776.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11909.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11938.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_11988.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12308.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12330.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12332.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12380.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12489.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12499.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12606.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12641.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12646.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12782.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12831.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12847.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12881.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12926.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_12938.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1306.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13083.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13398.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13474.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/table/train_13479.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13510.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13515.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13603.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13640.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13730.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13754.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13825.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13835.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1385.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13866.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_139.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13912.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13915.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_13937.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14013.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14079.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14088.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14098.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1411.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14189.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1422.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14260.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14266.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14312.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14317.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14358.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1436.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14393.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14489.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14512.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14517.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14572.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14685.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14700.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14718.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14772.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14780.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1480.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14806.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14855.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_14909.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15109.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15184.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15226.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15228.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15245.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/table/train_15273.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1532.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15343.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15407.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15427.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15447.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15461.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15560.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15662.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15669.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15747.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15876.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15895.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15919.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15951.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_15959.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16002.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1602.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16073.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16089.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16127.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16226.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16266.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16291.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16330.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16366.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16392.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16429.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1645.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16506.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16562.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16629.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16705.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16740.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16744.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16787.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1679.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16852.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16861.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_16907.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1692.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17075.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17128.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17290.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17331.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17342.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17390.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/table/train_174.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17402.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17409.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17411.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17464.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17556.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17557.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17586.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17706.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17732.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17754.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17759.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17764.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17788.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1779.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17856.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_17959.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1798.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18000.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18020.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18044.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18087.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18097.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18099.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18118.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1819.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18257.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18305.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18307.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18325.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18354.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18381.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18394.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18402.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18457.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18502.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18616.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18632.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18714.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18776.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18905.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18911.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1894.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_18991.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19069.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1907.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/table/train_19113.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19163.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19185.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19202.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19222.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19237.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19260.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19299.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19483.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19485.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19540.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1955.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19622.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19627.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19849.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_19871.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_1998.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_2008.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20102.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20187.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20218.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20223.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20242.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20259.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20319.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20373.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_2038.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20521.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20522.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20567.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20718.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20733.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20751.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20806.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20817.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20819.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20829.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20830.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_2084.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_20858.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21047.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21120.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21135.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21145.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21149.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21150.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21194.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/table/train_21224.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21251.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21288.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_2130.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21362.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21383.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21449.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21493.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21497.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21525.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21526.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21591.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21615.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21620.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21722.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21748.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_21773.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22029.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22241.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22256.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22289.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22290.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22394.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22429.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22432.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22467.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22471.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22475.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22487.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22576.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22662.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22680.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22687.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22691.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22695.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22890.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22905.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22923.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22936.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_22951.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23123.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23131.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23135.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23149.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23187.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23306.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23348.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/table/train_23442.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23487.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23555.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23570.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23637.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23740.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23751.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23779.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23798.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23816.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23825.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23832.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23883.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_23982.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24044.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24085.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24148.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24190.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24230.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24234.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24248.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24269.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24284.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24337.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_2436.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24443.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24614.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24658.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24671.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24675.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_2469.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24690.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24701.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24727.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24730.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24763.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24797.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_24951.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_2537.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_259.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_2639.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_270.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_271.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_2790.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_2809.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_2816.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_2839.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/table/train_2859.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_2924.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_297.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_2972.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_3013.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_3051.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_308.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_3250.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_3288.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_3319.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_3372.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_3427.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_3480.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_3534.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_3597.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_365.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_3862.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_3876.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_3891.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_3999.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4089.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4129.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4135.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4143.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_417.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4218.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_426.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4275.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4284.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4308.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4349.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_44.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4407.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4409.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4549.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4565.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4608.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4615.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4682.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4688.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4722.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4757.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4785.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4806.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4820.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_484.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_4919.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_492.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/table/train_5004.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5080.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5089.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_512.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_514.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5140.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5209.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5227.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5314.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5429.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5441.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5504.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5526.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5563.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_558.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5607.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5675.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5693.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5724.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5747.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5788.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_5904.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6043.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6046.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6059.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6166.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6187.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6215.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6244.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6276.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6333.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6347.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6373.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6444.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6454.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6504.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6605.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6613.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6655.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6659.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6679.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6710.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6726.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6748.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6807.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6844.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6877.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6880.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/table/train_6930.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_6976.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_702.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7090.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7185.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7272.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7296.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_731.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7385.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7492.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7508.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_751.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_755.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7653.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7668.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7672.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7721.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7805.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7833.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7871.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7876.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7898.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_7979.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8020.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8025.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8032.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8049.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8055.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8215.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8235.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8247.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8263.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8273.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8305.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8343.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8367.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8388.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8406.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8415.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8427.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8447.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8450.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8558.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8575.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8645.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_881.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8855.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_8945.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/table/train_8961.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_9047.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_9243.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_935.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_9352.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_938.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_9481.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_9665.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_9675.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/table/train_9763.jpg \n creating: /content/gdrive/MyDrive/CIFAR50/images/telephone/\n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1026.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1037.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1081.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1186.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1223.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1261.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1335.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_144.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1496.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1514.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1516.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_152.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1536.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1609.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1800.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1804.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1822.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1830.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1875.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1896.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_190.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_1922.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2026.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2093.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2104.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2181.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2186.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2261.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2369.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2390.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2414.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2433.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2493.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2558.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2583.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2676.jpg 
\n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2702.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2799.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2821.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2877.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2943.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_2973.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3004.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3024.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3105.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3152.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3172.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3278.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3305.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3336.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_335.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3436.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3462.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3487.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_349.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3498.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_352.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3531.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3539.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3603.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3660.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3717.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3742.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3795.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3854.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_395.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3960.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3976.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_3987.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4011.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4016.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_406.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4073.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4084.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4097.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4137.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4164.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4245.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4307.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4316.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4439.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4446.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/telephone/test_4497.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4678.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4698.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4778.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4799.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4842.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4904.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4930.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4945.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4954.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_4983.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_512.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_743.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_764.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_842.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_885.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_887.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/test_901.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10026.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10063.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1008.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10135.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10295.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10321.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10334.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10377.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10390.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_104.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10599.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10657.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10701.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10702.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10742.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10746.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10785.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1079.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10794.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10813.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10814.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10909.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10918.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10923.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10967.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_10984.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/telephone/train_11008.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11011.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11017.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11026.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1103.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11047.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11048.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11063.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11070.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11090.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11185.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11210.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11254.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11273.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11274.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11366.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11377.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11391.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11397.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11413.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11450.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11479.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11529.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1156.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11586.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11595.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11599.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11635.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11669.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11673.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11674.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1168.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11782.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11792.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1180.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11834.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11891.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_11949.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12079.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12174.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12277.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_123.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12312.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12334.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12353.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/telephone/train_12369.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12457.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12511.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12525.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12538.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12550.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12610.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12729.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12906.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12918.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_12962.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13068.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13104.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13119.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13148.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1316.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13180.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13249.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13276.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13291.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13375.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13384.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13421.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13462.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13480.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13610.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1363.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13757.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13837.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13876.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13878.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_13987.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14012.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14020.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14086.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14169.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14172.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14182.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14324.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14345.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14510.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14527.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14549.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14613.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14637.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/telephone/train_14699.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14704.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14751.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14753.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14807.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_14931.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15125.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1521.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15239.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15250.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15298.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15333.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15348.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15357.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15382.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15547.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15633.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15666.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15722.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15750.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15759.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15797.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15913.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_15927.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16020.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16027.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16047.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16071.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16168.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16208.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16216.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16261.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16276.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16279.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16282.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16310.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16380.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16383.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16411.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1643.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16447.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16481.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16521.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16527.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1653.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/telephone/train_16530.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16585.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16642.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16833.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_16889.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1700.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17006.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17050.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17072.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17090.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17144.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17202.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17239.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17244.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17258.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17262.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17408.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17531.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17598.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17634.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17694.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17710.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17778.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17783.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17807.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17846.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_17855.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18005.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18008.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18041.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18124.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18125.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18369.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18386.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18460.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18499.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18516.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18701.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18838.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18860.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18889.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1890.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18923.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_18938.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19041.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/telephone/train_19061.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19073.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19136.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19194.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19354.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19368.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19385.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19397.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1941.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19497.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19526.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19575.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19578.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19579.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19805.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19806.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1982.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19866.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_1987.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19877.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19893.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19975.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_19993.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_20123.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_20186.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_20262.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_20280.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_20304.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_20320.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_20471.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_20504.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_20590.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2062.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_20623.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_20712.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_20788.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2081.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_20904.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_20997.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21001.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21027.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21142.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21165.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21177.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2126.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/telephone/train_21263.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21372.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21394.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21462.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21507.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21518.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21600.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21674.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2171.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21821.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21892.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21909.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21913.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2194.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21962.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21993.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_21999.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22025.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22033.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2207.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22107.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22119.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22202.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22237.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2225.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22296.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22459.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22470.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22550.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22638.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22640.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22663.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2267.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22702.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22720.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22721.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22725.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22832.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22897.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22909.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22991.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_22995.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23019.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23035.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23057.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/telephone/train_23105.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23140.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23144.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23184.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23190.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23231.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23238.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23247.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23266.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2329.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23329.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23373.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23376.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23381.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23420.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23458.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23522.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23541.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23584.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23666.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23767.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23791.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23792.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23810.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23846.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23855.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23858.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23900.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23955.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23969.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_23973.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24009.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24017.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24041.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24145.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24154.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24177.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24280.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_244.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24422.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24459.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24468.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24512.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24528.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24597.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/telephone/train_24684.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24708.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24717.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24789.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24851.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24873.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24912.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24949.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_24972.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2575.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2609.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2649.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2655.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2659.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2729.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2823.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2830.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2842.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2851.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2872.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_2903.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_3008.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_3018.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_3052.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_3109.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_3142.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_327.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_3286.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_3331.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_3336.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_3558.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_3688.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_3815.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_3834.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_3940.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4066.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4108.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4162.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4168.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4185.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4213.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4258.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4416.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4440.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4461.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/telephone/train_4602.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4629.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_469.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4721.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4819.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4827.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4923.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_4972.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5029.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5030.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5032.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5128.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5147.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5188.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5218.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5276.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_535.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5386.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5387.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5433.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5481.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5516.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5555.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5582.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5614.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5690.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5741.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5827.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5828.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_590.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5920.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_5931.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_6009.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_6096.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_623.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_6246.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_6336.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_6349.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_6375.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_6429.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_6440.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_6575.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_659.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_6602.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_6652.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/telephone/train_670.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_6775.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7008.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7014.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7079.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7105.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7122.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_716.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7316.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_743.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7474.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7505.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7549.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_758.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7749.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7757.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_778.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7780.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7842.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7849.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7868.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7922.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7932.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_7937.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8051.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8157.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8232.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8248.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8271.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8313.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8395.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8477.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8530.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8596.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8629.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8638.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8736.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8744.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8810.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8811.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8862.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_896.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_8960.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9020.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9026.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/telephone/train_9029.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9030.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9125.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9170.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9217.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9245.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9250.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9268.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9275.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9468.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9486.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9505.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9520.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9526.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9569.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_96.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9648.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9672.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_971.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9755.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_9909.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_994.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/telephone/train_997.jpg \n creating: /content/gdrive/MyDrive/CIFAR50/images/tiger/\n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1087.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1203.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1271.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1316.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1321.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1350.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1371.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1377.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1382.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1403.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1432.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1439.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1448.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1480.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1539.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1558.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1562.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1636.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1681.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1775.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1785.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1793.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tiger/test_1847.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/train/train_24530.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_24565.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_24653.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_24670.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_24703.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_24738.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_24852.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_24861.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_24969.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_24971.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_24985.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_2547.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_2555.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_2572.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_2581.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_2605.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_262.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_2630.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_2676.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_2683.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_2717.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_2789.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_281.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_2845.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_2988.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3067.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3072.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3074.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3210.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3227.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3238.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3297.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3303.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3333.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3350.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3412.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3424.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3437.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3458.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3489.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3511.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3550.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_359.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3648.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3694.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3702.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3711.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3765.jpg \n 
inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3768.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_383.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3868.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3873.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3874.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3879.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3963.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_3988.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_4001.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_4237.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_4428.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_4514.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_4661.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_4668.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_4749.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_4810.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_4884.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5086.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5096.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5125.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5126.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5137.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_515.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5212.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5284.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_529.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5304.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5352.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_541.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5468.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5475.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_548.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5482.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5498.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5501.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5505.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5507.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5519.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_553.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5536.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5619.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5623.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_569.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5720.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5733.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5773.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_583.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5839.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/train/train_5850.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_5995.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6131.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6173.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6210.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6252.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6265.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6297.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6332.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6343.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6368.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6377.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_640.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6465.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6475.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6720.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6766.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6783.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6946.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_6967.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7045.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7076.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7175.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7204.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7322.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7356.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_739.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7405.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7430.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7438.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_748.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_750.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7515.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7534.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7544.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7629.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7681.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7708.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7714.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7782.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7783.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7801.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7821.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_783.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_7960.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_800.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_801.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8116.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/train/train_8146.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8181.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_820.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8323.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8339.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_836.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_837.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8423.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8466.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8492.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8545.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_862.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8632.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8649.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8656.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8659.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8681.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8683.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8842.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8933.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8941.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_8954.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9038.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9067.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9135.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9178.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9206.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9262.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9351.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9398.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9442.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9455.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9560.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9607.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9629.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9652.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_9670.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/train/train_978.jpg \n creating: /content/gdrive/MyDrive/CIFAR50/images/tulip/\n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1024.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1033.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1141.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1146.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1259.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1307.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1388.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_143.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/tulip/test_1494.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1560.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1603.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1635.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1790.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1805.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1828.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_186.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_1974.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2001.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2017.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2025.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2128.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2178.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2234.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2255.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2272.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2279.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2294.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2328.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2354.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2367.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_26.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2617.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2626.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2833.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_2895.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3013.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3022.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3190.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3213.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3249.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3287.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3306.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3347.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3380.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3419.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_346.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3460.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3461.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3476.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3480.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3486.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_350.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3513.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3584.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3645.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3683.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/tulip/test_3697.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3724.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3754.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3778.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3798.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3817.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3840.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3901.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3923.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3942.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_3988.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4017.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4094.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4133.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4134.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4149.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4213.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4252.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4407.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4478.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4627.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4638.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4658.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4704.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_48.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4819.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4835.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4931.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_4969.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_5.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_514.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_561.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_604.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_612.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_639.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_684.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_698.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_754.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_794.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_813.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_825.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_907.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/test_994.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_10128.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_10200.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_10260.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_10378.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1041.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1047.jpg 
\n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_10551.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_10598.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_10611.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_10668.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_10722.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_10910.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1108.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1120.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11226.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11234.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11344.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11446.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11521.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11567.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11655.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11794.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11810.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11812.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11873.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11897.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11902.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11926.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_11951.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12031.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12142.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12148.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12265.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1228.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12289.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12292.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12321.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12333.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12347.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12403.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12450.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12461.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12486.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12507.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12512.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12584.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12620.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12625.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12633.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12665.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12719.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12730.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12744.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/tulip/train_12789.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12809.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12812.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12940.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_12982.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13045.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13063.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13183.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13191.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13195.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13254.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13270.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13399.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1341.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13436.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13454.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13481.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13490.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13529.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13542.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13546.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13579.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13627.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13637.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13780.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13830.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13832.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13860.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13865.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13894.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13949.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_13975.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1402.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_14056.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1407.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_14087.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_14125.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_14235.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_14308.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_14390.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1451.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_14542.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_14574.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_14579.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_14710.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_14725.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_14765.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/tulip/train_14793.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_14871.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_14935.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15065.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15111.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15129.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15150.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15203.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15232.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15327.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15332.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15354.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15405.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15471.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15522.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15533.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15684.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15773.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_15803.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16014.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16028.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16072.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16129.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16271.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1628.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16345.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16373.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16434.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16492.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16499.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16566.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16592.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16687.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16706.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16794.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1689.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16929.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_16933.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1694.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17025.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17054.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17055.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17088.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17162.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17213.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17247.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17295.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/tulip/train_17343.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17356.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17396.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17403.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17500.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17503.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17527.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17535.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17564.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17608.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17629.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1763.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17656.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1766.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17681.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17707.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17744.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17832.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17836.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17845.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17867.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17899.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17913.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17922.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_17964.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18042.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18064.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18089.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18112.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18120.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1816.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18176.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_1821.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18246.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18260.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18332.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18351.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18412.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18427.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18438.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18495.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18629.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18636.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18654.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18741.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18746.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18751.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/tulip/train_18755.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18767.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18820.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18873.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18880.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18933.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18949.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_18971.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19003.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19048.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19060.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19085.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19190.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19245.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19252.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19265.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19283.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19316.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19317.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19359.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19360.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19370.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19376.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19410.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19444.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19458.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19530.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19553.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19633.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19652.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19662.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19663.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19708.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19768.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19824.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19882.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19982.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_19983.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_20117.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2018.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_20264.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_20364.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_20371.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_20461.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_20551.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_20553.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_20563.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/tulip/train_206.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_20696.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_20729.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2086.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_20882.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_20986.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21022.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21053.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21131.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21170.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2118.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21213.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21250.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21284.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21305.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21392.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21453.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21531.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21556.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21598.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2160.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21609.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21653.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21662.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21678.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21885.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21894.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21901.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2193.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_21946.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_22009.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2206.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_22079.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_22090.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_22149.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_22173.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2224.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_22320.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_22327.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_22353.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_22530.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2257.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_22579.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_22621.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_22672.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_22762.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_22776.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/tulip/train_22975.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2306.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23064.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23098.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23115.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23136.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23148.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23213.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23230.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23474.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23493.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23558.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23578.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23600.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23688.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2371.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23732.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23847.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23869.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23936.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23945.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23948.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_23978.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24049.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24098.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2410.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24210.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24217.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24258.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24259.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24272.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24295.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24334.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24356.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24404.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24555.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24615.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24622.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24650.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24663.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24734.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_24753.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_251.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2526.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2549.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2617.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/tulip/train_2638.jpg \n inflating: 
 [unzip extraction log truncated — the remaining output only lists the per-class image files (tulip, wardrobe, willow_tree, …) being inflated into /content/gdrive/MyDrive/CIFAR50/images/]
\n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12396.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12419.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12428.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12492.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12519.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12544.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12562.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12582.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1264.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1266.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1269.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12733.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12735.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12788.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12800.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12844.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12911.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_12961.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_13020.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1310.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1317.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_13258.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_13286.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1340.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_13422.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_13467.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_13489.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1351.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_13513.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_13539.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1359.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_13598.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_13657.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_13774.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_13813.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_13903.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_13924.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14029.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14037.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14106.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14191.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14194.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14202.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14236.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14289.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14336.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14339.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14421.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14485.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14541.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1460.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14614.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14641.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14669.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14678.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14720.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1478.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14814.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1484.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14861.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14878.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14915.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14919.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_14946.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15023.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15051.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15063.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15078.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15085.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15355.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15377.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15398.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15423.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15437.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15465.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15468.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15549.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15608.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15642.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15707.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15712.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15758.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15761.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15806.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15821.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15898.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_15900.jpg \n 
inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16033.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16116.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16146.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16265.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16305.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16450.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16523.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16623.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16682.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16728.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16731.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16863.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1690.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16930.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16958.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_16986.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17058.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17078.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17100.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17120.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17134.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17154.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17183.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_172.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17223.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17242.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_173.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17323.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17324.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17344.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17349.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17371.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17399.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1740.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17429.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17459.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17611.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17637.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1785.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17851.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17853.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17873.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17905.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17908.jpg 
\n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_17979.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_18093.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_18110.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_18156.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_18241.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_18316.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_1839.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_18396.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_18517.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_18537.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_18547.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_18680.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_18732.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_18766.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_18906.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_18936.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19042.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19071.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19074.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19133.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19218.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19231.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19239.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19372.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19379.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19398.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19512.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19542.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19547.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19566.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19608.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19612.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19744.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19758.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19835.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19856.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19894.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19907.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_19996.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20006.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20163.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20206.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20274.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20282.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20341.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20374.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_205.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20557.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20627.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20635.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20678.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20693.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_2077.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20838.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_2091.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20948.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20963.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_20982.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_2100.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21067.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21070.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21189.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21214.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21296.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21297.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21308.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21310.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21479.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_2148.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21510.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21545.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21580.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21594.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21619.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21628.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21702.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21726.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_2173.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21806.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21811.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21820.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21873.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21893.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21907.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21954.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21980.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21989.jpg \n 
inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_21992.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22164.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22189.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22205.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22276.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22287.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22482.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22521.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22566.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22583.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22683.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22812.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22857.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22864.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22873.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_22953.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23075.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23100.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23150.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_2320.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23225.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23260.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23269.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23378.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23402.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23486.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23562.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23641.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23663.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23672.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23681.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23701.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23722.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23729.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23731.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_2375.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23778.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_2379.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23853.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23880.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23915.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23924.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23946.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23963.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23965.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_23968.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24007.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24015.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24045.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24105.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24130.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24143.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24161.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24199.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24235.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24247.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24262.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24317.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24339.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24445.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24453.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24538.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24544.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_246.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24608.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24620.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24627.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24683.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24700.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24761.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24836.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24843.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24904.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24919.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24947.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24956.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_2498.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_24990.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_2564.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_2703.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_2812.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_2844.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_2948.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3006.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3031.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3097.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3140.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3216.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3222.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3316.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3335.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3348.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3382.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3409.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_342.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3439.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3482.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3555.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3585.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3626.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3667.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3677.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3885.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3892.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_3951.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_4.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_402.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_4109.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_4120.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_4167.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_4172.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_4193.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_4217.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_4424.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_4586.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_4621.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_4671.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_4689.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_4846.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_496.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5040.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5056.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5072.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5077.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5085.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5169.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5213.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5269.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5321.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5361.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5393.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5398.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5459.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5491.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5492.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5570.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5714.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5793.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5838.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5896.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_59.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5988.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_5998.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_6074.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_6192.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_6206.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_6355.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_6392.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_6398.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_6607.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_6753.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_6774.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_6829.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_6881.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7019.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7022.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7053.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7111.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7112.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7281.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7294.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7349.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7364.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7402.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_746.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7462.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7484.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7528.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7564.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7571.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7584.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7597.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7735.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7747.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7756.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7769.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7778.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7800.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7806.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7811.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7872.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7927.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7958.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7969.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7975.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_7998.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8069.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8079.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8123.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8182.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8194.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8202.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8213.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8252.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8264.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8302.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8308.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8337.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8378.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8380.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8385.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8500.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8673.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8749.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8767.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8799.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8802.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8837.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8947.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8997.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_8998.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9025.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_904.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9074.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9075.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_917.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9193.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9227.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9253.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9448.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9475.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9564.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9565.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9637.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9656.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9681.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9683.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9777.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9834.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9854.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9938.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/willow_tree/train_9970.jpg \n creating: /content/gdrive/MyDrive/CIFAR50/images/woman/\n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1001.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1058.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1092.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1169.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1224.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1277.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1308.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1400.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1404.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1417.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1433.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1567.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1608.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1676.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1714.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1744.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1912.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1934.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1961.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_1983.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_204.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2085.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2142.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2156.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_216.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2188.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2207.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2219.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_228.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2290.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2362.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2375.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2407.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/woman/test_2449.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2500.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2517.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2555.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2598.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_260.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2663.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2669.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2750.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2757.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_2802.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_297.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_3005.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_3029.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_304.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_3160.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_3299.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_3384.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_3544.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_3569.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_3598.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_3635.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_3743.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_3752.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_381.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_3813.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_3874.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_3883.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4063.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4115.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4157.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4198.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4226.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_432.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4323.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4331.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_435.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4451.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4479.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4491.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4518.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4528.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4565.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4659.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4746.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4756.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4770.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4781.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/woman/test_4863.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4871.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4887.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4909.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_4924.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_563.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_585.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_623.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_674.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_685.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_737.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_822.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_832.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_84.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_937.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_963.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_97.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_987.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/test_998.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10036.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10057.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10069.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10092.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10107.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_1011.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10136.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10211.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10242.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10249.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10355.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10423.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_1048.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_1050.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10539.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10553.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10601.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10616.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10630.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10717.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10797.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10819.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10830.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10837.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10969.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_1097.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_10997.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11010.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11087.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50/images/woman/train_11128.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11164.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11353.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_1136.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11382.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11480.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11563.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11569.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11622.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11667.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11702.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11736.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_1179.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11882.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11888.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_11904.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12029.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12094.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12126.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12162.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12194.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12270.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12311.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12370.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12378.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12408.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12541.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12617.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12681.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12712.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12741.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12819.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12827.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12885.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12979.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_12991.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_13062.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_13085.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_13095.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_13244.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_13298.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_13311.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_13354.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_13373.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_13477.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_13607.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_13654.jpg \n inflating: 
inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_7644.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_7752.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_785.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_7900.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_7923.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_7946.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_8043.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_8068.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_8114.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_8227.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_8334.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_8429.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_8433.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_8618.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_8768.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_879.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_8843.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_89.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9032.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9073.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9089.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9159.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9230.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9235.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_924.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9259.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9340.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9396.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9456.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9460.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9466.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9472.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9507.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9508.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_952.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9548.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9593.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9633.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9658.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9679.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9711.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9840.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9948.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9957.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50/images/woman/train_9975.jpg \n" ], [ "!unzip /content/gdrive/MyDrive/cifar50_imbalance_0.02.zip -d /content/gdrive/MyDrive/CIFAR50_imbalance_0.02", "\u001b[1;30;43mStreaming output truncated to the last 5000 lines.\u001b[0m\n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5037.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5038.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5039.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5040.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5041.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5042.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5043.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5044.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5045.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5046.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5047.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5048.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5049.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5050.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5051.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5052.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5053.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5054.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5055.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5056.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5057.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5058.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5059.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5060.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5061.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5062.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5063.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5064.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5065.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5066.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5067.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5068.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5069.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5070.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5071.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5072.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5073.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5074.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5075.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5076.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5077.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5078.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5079.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5080.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5081.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5082.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5083.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5084.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5085.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5086.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5087.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5088.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5089.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5090.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5091.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5092.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5093.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5094.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5095.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5096.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5097.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5098.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5099.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5100.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5101.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5102.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/hamster/train_5103.jpg \n creating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1002.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1045.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1109.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1114.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1127.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1158.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1177.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1351.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1366.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1372.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1392.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_141.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1413.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1437.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1476.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1485.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1508.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1549.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1637.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1657.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1726.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1771.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1811.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1840.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1893.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1906.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1947.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1953.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_1964.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_2028.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_2037.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_2121.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_2135.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_2212.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_2229.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_2271.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_234.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_245.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_2480.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_2503.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_251.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_2547.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_2596.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_2768.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_285.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_288.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_2886.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_2908.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_3044.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_3177.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_3314.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_3323.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_3438.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_3469.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_3507.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_3520.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_3550.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_3614.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_3626.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_374.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_3830.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_3973.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_398.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4020.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4076.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4152.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4181.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4214.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4285.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4326.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4355.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4401.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4431.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4469.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4508.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4529.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4538.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4599.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4685.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4692.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4699.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4719.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4838.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4878.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4882.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4889.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_4916.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_555.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_590.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_592.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_611.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_613.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_665.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_666.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_694.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_849.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_852.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_888.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_973.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/test_986.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5104.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5105.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5106.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5107.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5108.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5109.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5110.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5111.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5112.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5113.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5114.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5115.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5116.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5117.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5118.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5119.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5120.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5121.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5122.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5123.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5124.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5125.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5126.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5127.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5128.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5129.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5130.jpg \n 
inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5131.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5132.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5133.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5134.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5135.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5136.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5137.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5138.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5139.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5140.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5141.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5142.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5143.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5144.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5145.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5146.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5147.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5148.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5149.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5150.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5151.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5152.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5153.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5154.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5155.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5156.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5157.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5158.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5159.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5160.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5161.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5162.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5163.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5164.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5165.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5166.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5167.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5168.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5169.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5170.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5171.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5172.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5173.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5174.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5175.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5176.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5177.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5178.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5179.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5180.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5181.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5182.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5183.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5184.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5185.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5186.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5187.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5188.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5189.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5190.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5191.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5192.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5193.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5194.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5195.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5196.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5197.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5198.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5199.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5200.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5201.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5202.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5203.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5204.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5205.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5206.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5207.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5208.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5209.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5210.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5211.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5212.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5213.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/kangaroo/train_5214.jpg \n creating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1009.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1121.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1196.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1315.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_133.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1337.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1342.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1478.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_151.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1574.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1588.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_168.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1738.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_179.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1834.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1882.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1902.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_195.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1956.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1963.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_1965.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_200.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2048.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_205.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2140.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2220.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2327.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2368.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2420.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2435.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2446.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2476.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2599.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2644.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2729.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2737.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2784.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2852.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2867.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_2999.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3011.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3035.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3101.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3106.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3138.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3164.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3216.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3223.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3235.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3365.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3417.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3441.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3532.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3563.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3654.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3687.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3699.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3712.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3721.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3735.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3736.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3829.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3897.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3907.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3958.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_3969.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_402.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4055.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4059.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4128.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4162.jpg \n 
inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4199.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4265.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4272.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4305.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4381.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4434.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_444.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4471.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4537.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4578.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4618.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4668.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4791.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4820.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4852.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4855.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4879.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_489.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4895.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4897.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_4908.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_509.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_77.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_808.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_83.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_853.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_873.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/test_942.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5215.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5216.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5217.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5218.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5219.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5220.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5221.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5222.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5223.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5224.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5225.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5226.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5227.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5228.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5229.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5230.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5231.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5232.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5233.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5234.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5235.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5236.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5237.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5238.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5239.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5240.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5241.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5242.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5243.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5244.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5245.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5246.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5247.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5248.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5249.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5250.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5251.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5252.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5253.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5254.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5255.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5256.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5257.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5258.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5259.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5260.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5261.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5262.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5263.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5264.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5265.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5266.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5267.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5268.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5269.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5270.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5271.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5272.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5273.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5274.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5275.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5276.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5277.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5278.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5279.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5280.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5281.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5282.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5283.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5284.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5285.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5286.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5287.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5288.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5289.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5290.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5291.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5292.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5293.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5294.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5295.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5296.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5297.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5298.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5299.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5300.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5301.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5302.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5303.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5304.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5305.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5306.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5307.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5308.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5309.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5310.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5311.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5312.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5313.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5314.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5315.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/lamp/train_5316.jpg \n creating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_1080.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_123.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_1264.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_130.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_1300.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_1314.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_142.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_1481.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_1511.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_1527.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_157.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_1573.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_1620.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_1644.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_1649.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_1669.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_1754.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_1762.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_2029.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_2080.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_2103.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_2119.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_213.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_2269.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_2270.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_2307.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_24.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_240.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/leopard/test_2441.jpg \n 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5669.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5670.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5671.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5672.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5673.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5674.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5675.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5676.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5677.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5678.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5679.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5680.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5681.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5682.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5683.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5684.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5685.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5686.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5687.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5688.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5689.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5690.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5691.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5692.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5693.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5694.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5695.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5696.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5697.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5698.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5699.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5700.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5701.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5702.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5703.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5704.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5705.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5706.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5707.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5708.jpg \n 
inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5709.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5710.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5711.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5712.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5713.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5714.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5715.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5716.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5717.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5718.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5719.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5720.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5721.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5722.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/mouse/train_5723.jpg \n creating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1050.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1056.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1115.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1167.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1181.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1278.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1288.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1312.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1356.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1548.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1624.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1718.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1789.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1850.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1880.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1969.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_1987.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_199.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2069.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2166.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2222.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2228.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2235.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2251.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2274.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2296.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_23.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2330.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2339.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2436.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2511.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2518.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2579.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2580.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2587.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2601.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2679.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2761.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2764.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2806.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2911.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_2923.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3068.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3092.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3120.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3133.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3135.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3151.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3170.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3171.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3188.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3208.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3220.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3250.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3317.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3349.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3392.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3414.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3475.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3623.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3726.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3741.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3790.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3841.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3858.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3881.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3929.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3966.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_3980.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_411.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_4204.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_4230.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_429.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_4295.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_4304.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_4419.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_4488.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_459.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_4713.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_472.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_4720.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_4726.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_4843.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_4851.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_4885.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_492.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_4989.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_520.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_57.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_572.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_579.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_59.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_691.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_709.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_778.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_779.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_837.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_897.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_911.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/test_935.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5724.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5725.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5726.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5727.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5728.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5729.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5730.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5731.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5732.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5733.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5734.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5735.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5736.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5737.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5738.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5739.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5740.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5741.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5742.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5743.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5744.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5745.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5746.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5747.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5748.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5749.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5750.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5751.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5752.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5753.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5754.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5755.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5756.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5757.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5758.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5759.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5760.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5761.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5762.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5763.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5764.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5765.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5766.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5767.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5768.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5769.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5770.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5771.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5772.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5773.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5774.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5775.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5776.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5777.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5778.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5779.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5780.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5781.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5782.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5783.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5784.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5785.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5786.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/oak_tree/train_5787.jpg \n creating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1015.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1074.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1133.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1183.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1205.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1235.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1323.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1502.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1537.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1587.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1662.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1720.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1778.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1795.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1814.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1820.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1898.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1923.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_1925.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2002.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2072.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2075.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2076.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2078.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2159.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2199.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2247.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2319.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_233.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2333.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2371.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2385.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2437.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2467.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2492.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_254.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2548.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_257.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2620.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_269.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2695.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2741.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2788.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2792.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2820.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2832.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2836.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2889.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_2922.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3014.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3159.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3182.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3222.jpg \n 
inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3313.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3379.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3451.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3466.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3470.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_35.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3509.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3650.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3702.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3784.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3822.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_386.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3877.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3939.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_3989.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4024.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4033.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4052.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4107.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_420.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4244.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4308.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4309.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4311.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_447.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4474.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4534.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4612.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4680.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4728.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4771.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_478.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_479.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4830.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4845.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_4980.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_559.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_626.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_73.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_741.jpg \n 
inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_763.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_767.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_809.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_868.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_869.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_872.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/test_976.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5788.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5789.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5790.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5791.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5792.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5793.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5794.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5795.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5796.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5797.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5798.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5799.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5800.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5801.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5802.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5803.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5804.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5805.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5806.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5807.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5808.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5809.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5810.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5811.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5812.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5813.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5814.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5815.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5816.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5817.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5818.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5819.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5820.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5821.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5822.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5823.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5824.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5825.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5826.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5827.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5828.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5829.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5830.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5831.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5832.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5833.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5834.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5835.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5836.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5837.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5838.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5839.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5840.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5841.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5842.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5843.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5844.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5845.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/orchid/train_5846.jpg \n creating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1032.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1069.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1165.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1179.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_13.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1324.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1360.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1447.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1477.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1525.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_16.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1625.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1680.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1697.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1701.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1712.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1747.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1860.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1863.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1869.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1890.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1951.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_1999.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2077.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2112.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2144.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2194.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2231.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2265.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2286.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2299.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2353.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2425.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2471.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2591.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2653.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2699.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2713.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_276.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2815.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2841.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2861.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2866.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2898.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_2905.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_294.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3055.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3117.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3139.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3161.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3184.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3251.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3256.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3285.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_331.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3490.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3573.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3582.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3607.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_363.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3729.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3748.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3768.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3772.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3932.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_3947.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4031.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4049.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4124.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4168.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4195.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4222.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4264.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4288.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4302.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4560.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4568.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4589.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_46.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4615.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_466.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4660.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4681.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_469.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4732.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_481.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4813.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4886.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4900.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_4977.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_633.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_636.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_728.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_776.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_784.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_823.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_847.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_880.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_921.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/test_954.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5847.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5848.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5849.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5850.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5851.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5852.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5853.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5854.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5855.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5856.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5857.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5858.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5859.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5860.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5861.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5862.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5863.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5864.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5865.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5866.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5867.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5868.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5869.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5870.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/palm_tree/train_5871.jpg \n inflating: 
inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_4273.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_4284.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_4349.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_4360.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_4391.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_4392.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_440.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_4422.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_4536.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_4594.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_4614.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_475.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_4804.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_486.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_4960.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_4961.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_637.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_650.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_675.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_695.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_712.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_797.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_824.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_896.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_92.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/test_930.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6115.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6116.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6117.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6118.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6119.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6120.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6121.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6122.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6123.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6124.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6125.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6126.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6127.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6128.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6129.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6130.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6131.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6132.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6133.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6134.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6135.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6136.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6137.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6138.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6139.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6140.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6141.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6142.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6143.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6144.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6145.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6146.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6147.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/road/train_6148.jpg \n creating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_11.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1122.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1160.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1176.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1244.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_126.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1298.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1420.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1428.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1453.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1581.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1583.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1596.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1639.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1732.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1791.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1815.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1871.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1949.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_1972.jpg 
\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2038.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2163.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2238.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2244.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2266.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2311.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2315.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2450.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2692.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2701.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2744.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2773.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2809.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2913.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_2982.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3016.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3019.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3021.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3052.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3086.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3107.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3354.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3415.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3427.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3433.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3528.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3575.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3576.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3677.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3705.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3711.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3730.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3734.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3744.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3766.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3852.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3885.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3886.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3887.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_390.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3927.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_3949.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_405.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4069.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4071.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4075.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4173.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4180.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4187.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4266.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4277.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4280.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4328.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4333.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4335.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4420.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4455.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4467.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4573.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4581.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4655.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4731.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4749.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4788.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4934.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4943.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_4999.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_500.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_51.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_550.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_6.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_628.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_638.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_658.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_7.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_740.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_749.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_810.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_894.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/test_908.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6149.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6150.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6151.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6152.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6153.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6154.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6155.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6156.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6157.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6158.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6159.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6160.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6161.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6162.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6163.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6164.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6165.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6166.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6167.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6168.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6169.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6170.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6171.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6172.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6173.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6174.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6175.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6176.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6177.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6178.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/rose/train_6179.jpg \n creating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_0.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1028.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1130.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1172.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1209.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1216.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1255.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1297.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1326.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1343.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1353.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1383.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1510.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1570.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1664.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1723.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1774.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1801.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1803.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_1899.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_196.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2012.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_202.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2027.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2082.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2090.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2098.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2101.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2184.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2224.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2267.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2293.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2359.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2370.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2395.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2430.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_244.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2589.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_272.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2734.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2790.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2811.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2818.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2856.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2871.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2975.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_298.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_2987.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3002.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3099.jpg \n 
inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3158.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3165.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3185.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3248.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3257.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_326.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3260.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3276.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3399.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3488.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3540.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3543.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3583.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3601.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3615.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3672.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3673.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3698.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3867.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3875.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3915.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_3968.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4136.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4225.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4322.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4413.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4428.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4430.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4445.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4546.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4585.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_464.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4654.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4674.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4691.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4847.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4849.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4877.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4907.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4926.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_4955.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_510.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_526.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_542.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_554.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_594.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_654.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_681.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_850.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/test_859.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6180.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6181.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6182.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6183.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6184.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6185.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6186.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6187.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6188.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6189.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6190.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6191.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6192.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6193.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6194.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6195.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6196.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6197.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6198.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6199.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6200.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6201.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6202.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6203.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6204.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6205.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6206.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6207.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/seal/train_6208.jpg \n creating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/\n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_1031.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_1070.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_1084.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_1227.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_1234.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_1299.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_1364.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_1401.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_1461.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_1553.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_1724.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_1878.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_1936.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_198.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2020.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2023.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2050.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2056.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2108.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2152.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2175.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2180.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2218.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2248.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2323.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2376.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_242.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_246.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2495.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2509.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2539.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2543.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2572.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2590.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2652.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2714.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2723.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2899.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2926.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2928.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2970.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_2985.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3032.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3067.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3073.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3087.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3095.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3098.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3156.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3168.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3203.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3226.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3302.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3328.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3386.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3387.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_342.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_353.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3612.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3682.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3688.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3835.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3838.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3872.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3878.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3884.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3892.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3914.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_3962.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4081.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4104.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4142.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_417.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4175.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4188.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4224.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4327.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4341.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4356.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4461.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4629.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4644.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4645.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4695.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4697.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4734.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4739.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4754.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4764.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4789.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4846.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_488.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4974.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4976.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4979.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_4990.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_54.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_774.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_796.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/test_952.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6209.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6210.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6211.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6212.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6213.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6214.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6215.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6216.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6217.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6218.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6219.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6220.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6221.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6222.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6223.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6224.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6225.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6226.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6227.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6228.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6229.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6230.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6231.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6232.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6233.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/shrew/train_6234.jpg \n creating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_102.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1021.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_106.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1129.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1139.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1149.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1161.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1226.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1248.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_125.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1263.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1325.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1454.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1524.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_153.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1546.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1555.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1557.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1658.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1707.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1709.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1883.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1945.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1946.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_1990.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2014.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2141.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2263.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_231.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2337.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2365.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2378.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2474.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2489.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2501.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2522.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2594.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2651.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2664.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_267.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2720.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2758.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2785.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2810.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2872.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_2956.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3000.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_311.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3193.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3209.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3309.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3330.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3348.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3372.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3375.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3467.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3492.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3499.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3512.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3592.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_364.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3646.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3701.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3844.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_387.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3941.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3961.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3965.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/skyscraper/test_3979.jpg \n inflating: 
[unzip output truncated: extracting /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/ with one folder per class (skyscraper, snake, squirrel, sunflower, table, telephone, tiger, train, ...), each holding its test_*.jpg and train_*.jpg files]
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3201.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3351.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3357.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3385.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3411.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3449.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3606.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3630.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3637.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_369.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3789.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_385.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3856.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3866.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3888.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3902.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3931.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_3981.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4086.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4146.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4209.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4243.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_427.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4282.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4314.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4377.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4511.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_457.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4587.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4595.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4635.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4702.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4752.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4854.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4870.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4958.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_499.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_4992.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_511.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_519.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_546.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_614.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_643.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_646.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_716.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_838.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_870.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_893.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_904.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/test_98.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/train_6370.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/train_6371.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/train_6372.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/train_6373.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/train_6374.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/train_6375.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/train_6376.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/train_6377.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/train_6378.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/train_6379.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/train_6380.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/train_6381.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/train_6382.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/train/train_6383.jpg \n creating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1024.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1033.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1141.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1146.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1259.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1307.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1388.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_143.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1494.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1560.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1603.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1635.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1790.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1805.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1828.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_186.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_1974.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2001.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2017.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2025.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2128.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2178.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2234.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2255.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2272.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2279.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2294.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2328.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2354.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2367.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_26.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2617.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2626.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2833.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_2895.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3013.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3022.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3190.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3213.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3249.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3287.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3306.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3347.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3380.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3419.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_346.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3460.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3461.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3476.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3480.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3486.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_350.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3513.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3584.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3645.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3683.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3697.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3724.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3754.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3778.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3798.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3817.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3840.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3901.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3923.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3942.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_3988.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4017.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4094.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4133.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4134.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4149.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4213.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4252.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4407.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4478.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4627.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4638.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4658.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4704.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_48.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4819.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4835.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4931.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_4969.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_5.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_514.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_561.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_604.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_612.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_639.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_684.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_698.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_754.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_794.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_813.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_825.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_907.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/test_994.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/train_6384.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/train_6385.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/train_6386.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/train_6387.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/train_6388.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/train_6389.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/train_6390.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/train_6391.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/train_6392.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/train_6393.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/train_6394.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/train_6395.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/tulip/train_6396.jpg \n creating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_101.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1043.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1192.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1202.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1258.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1334.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1435.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1483.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1503.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1547.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_163.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1668.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1784.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1833.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1835.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_184.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1877.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1885.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1904.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_192.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1937.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1984.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1985.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_1986.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2011.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2052.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2065.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2084.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2155.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2195.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2282.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2332.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2423.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2438.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2443.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2512.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2577.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2640.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2666.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2689.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2766.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2775.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2797.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2808.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_2896.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3018.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3081.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3100.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3121.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3202.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3212.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3218.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3269.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_327.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_333.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3395.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3408.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3416.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3443.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3577.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3602.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3664.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_371.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3740.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3760.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3825.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3826.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3827.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3868.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_3930.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_404.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4147.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4150.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4291.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4317.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4396.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4405.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4425.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4458.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4466.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4482.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4544.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4584.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4604.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4616.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4647.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4664.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4833.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_4872.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_508.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_679.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_708.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_739.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_757.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_815.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_87.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_914.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_923.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_941.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/test_965.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/train_6397.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/train_6398.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/train_6399.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/train_6400.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/train_6401.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/train_6402.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/train_6403.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/train_6404.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/train_6405.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/train_6406.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/train_6407.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/wardrobe/train_6408.jpg \n creating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1118.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1218.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1229.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1245.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1247.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1285.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_129.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1290.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1427.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_146.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1482.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_149.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1556.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1584.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1689.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1711.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1768.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1769.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1776.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_180.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1888.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_1954.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_2005.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_2010.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_206.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_211.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_2126.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_2164.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_2206.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_2276.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_2303.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_2334.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_2379.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_2610.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_2622.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_2801.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_287.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_2915.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3020.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3059.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3062.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3108.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3125.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_317.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3191.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3282.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3320.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3420.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3425.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3432.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3503.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3518.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3548.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3591.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3609.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3610.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3693.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3695.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3718.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3780.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3787.jpg 
\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_382.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3909.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_3999.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4037.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4096.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4207.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_426.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4345.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4400.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4403.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_442.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4435.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4459.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4496.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4530.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_454.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4564.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4666.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4670.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_470.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4708.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4721.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4824.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4840.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4893.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_4991.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_505.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_539.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_548.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_568.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_624.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_647.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_677.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_683.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_793.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_814.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_860.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_886.jpg 
\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/test_996.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/train_6409.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/train_6410.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/train_6411.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/train_6412.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/train_6413.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/train_6414.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/train_6415.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/train_6416.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/train_6417.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/train_6418.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/willow_tree/train_6419.jpg \n creating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/\n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1001.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1058.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1092.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1169.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1224.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1277.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1308.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1400.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1404.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1417.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1433.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1567.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1608.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1676.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1714.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1744.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1912.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1934.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1961.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_1983.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_204.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2085.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2142.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2156.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_216.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2188.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2207.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2219.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_228.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2290.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2362.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2375.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2407.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2449.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2500.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2517.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2555.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2598.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_260.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2663.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2669.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2750.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2757.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_2802.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_297.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_3005.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_3029.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_304.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_3160.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_3299.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_3384.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_3544.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_3569.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_3598.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_3635.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_3743.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_3752.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_381.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_3813.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_3874.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_3883.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4063.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4115.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4157.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4198.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4226.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_432.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4323.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4331.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_435.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4451.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4479.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4491.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4518.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4528.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4565.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4659.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4746.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4756.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4770.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4781.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4863.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4871.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4887.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4909.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_4924.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_563.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_585.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_623.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_674.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_685.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_737.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_822.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_832.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_84.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_937.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_963.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_97.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_987.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/test_998.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/train_6420.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/train_6421.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/train_6422.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/train_6423.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/train_6424.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/train_6425.jpg \n inflating: 
/content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/train_6426.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/train_6427.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/train_6428.jpg \n inflating: /content/gdrive/MyDrive/CIFAR50_imbalance_0.02/images/woman/train_6429.jpg \n" ] ], [ [ "Load and print label files to know the format of the provided labels.\n- `cifar50_train.json`\n- `cifar50_imbalance_0.02_train.json`", "_____no_output_____" ] ], [ [ "# TODO: load json files and print.\nimport json\nimport os\nimport pandas as pd\n\nos.chdir(\"/content/gdrive/MyDrive/CIFAR50\")\nf = open('cifar50_train.json','r')\n\ndata = json.loads(f.read())\ndf = pd.json_normalize(data['annotations'])\nprint(df)", " fpath ... category\n0 data\\cifar50\\images\\apple\\train_0.jpg ... apple\n1 data\\cifar50\\images\\telephone\\train_1.jpg ... telephone\n2 data\\cifar50\\images\\train\\train_2.jpg ... train\n3 data\\cifar50\\images\\cup\\train_3.jpg ... cup\n4 data\\cifar50\\images\\willow_tree\\train_4.jpg ... willow_tree\n... ... ... ...\n24995 data\\cifar50\\images\\fox\\train_24995.jpg ... fox\n24996 data\\cifar50\\images\\possum\\train_24996.jpg ... possum\n24997 data\\cifar50\\images\\crab\\train_24997.jpg ... crab\n24998 data\\cifar50\\images\\can\\train_24998.jpg ... can\n24999 data\\cifar50\\images\\squirrel\\train_24999.jpg ... squirrel\n\n[25000 rows x 4 columns]\n" ], [ "os.chdir(\"/content/gdrive/MyDrive/CIFAR50_imbalance_0.02\")\n\nf = open('cifar50_imbalance_0.02_train.json','r')\n\ndata = json.load(f)\ndf_imbalance = pd.json_normalize(data['annotations'])\nprint(df_imbalance)", " fpath ... category\n0 data\\cifar50_imbalance_0.02\\images\\apple\\train... ... apple\n1 data\\cifar50_imbalance_0.02\\images\\apple\\train... ... apple\n2 data\\cifar50_imbalance_0.02\\images\\apple\\train... ... apple\n3 data\\cifar50_imbalance_0.02\\images\\apple\\train... ... apple\n4 data\\cifar50_imbalance_0.02\\images\\apple\\train... ... apple\n... ... ... ...\n6425 data\\cifar50_imbalance_0.02\\images\\woman\\train... ... woman\n6426 data\\cifar50_imbalance_0.02\\images\\woman\\train... ... woman\n6427 data\\cifar50_imbalance_0.02\\images\\woman\\train... ... woman\n6428 data\\cifar50_imbalance_0.02\\images\\woman\\train... ... woman\n6429 data\\cifar50_imbalance_0.02\\images\\woman\\train... ... woman\n\n[6430 rows x 4 columns]\n" ] ], [ [ "Show some images with labels (class names) from dataset.", "_____no_output_____" ] ], [ [ "# Root directory for dataset\ndataroot = \"/content/gdrive/MyDrive/CIFAR50\"\n\n# Number of workers for dataloader\nworkers = 2\n\n# Batch size during training\nbatch_size = 128\n\n# Spatial size of training images. 
All images will be resized to this\n# size using a transformer.\nimage_size = 64\n\n# Learning rate for optimizers\nlr = 0.0001\n\n# Beta1 hyperparam for Adam optimizers\nbeta1 = 0.5", "_____no_output_____" ], [ "directory = \"/content/gdrive/MyDrive/CIFAR50/images\"\n\nclasses = [ f.name for f in os.scandir(directory) if f.is_dir() ]\nprint(classes)", "['apple', 'baby', 'beaver', 'bee', 'bicycle', 'bowl', 'bridge', 'butterfly', 'can', 'caterpillar', 'chair', 'clock', 'cockroach', 'crab', 'cup', 'dolphin', 'flatfish', 'fox', 'hamster', 'kangaroo', 'lamp', 'leopard', 'lizard', 'man', 'motorcycle', 'mouse', 'oak_tree', 'orchid', 'palm_tree', 'pickup_truck', 'plain', 'poppy', 'possum', 'raccoon', 'road', 'rose', 'seal', 'shrew', 'skyscraper', 'snake', 'squirrel', 'sunflower', 'table', 'telephone', 'tiger', 'train', 'tulip', 'wardrobe', 'willow_tree', 'woman']\n" ], [ "from torch.utils.data import Dataset, DataLoader\nfrom torchvision import transforms, utils\nclass CIFAR50(Dataset):\n def __init__(self, json_file,root_dir,transform=None,train=True):\n '''\n Reads json file, adds them to annotations. \n '''\n with open (json_file, mode='r') as f:\n json_dump=f.read()\n json_fomatted=json.loads(json_dump)\n self.annotations= json_fomatted['annotations']\n self.root_dir=root_dir\n self.transform=transform\n\n def __len__(self):\n return (len(self.annotations))\n \n def __getitem__(self,index):\n '''\n Returns an image and label based on annotations\n '''\n img_path=os.path.join(self.annotations[index]['fpath'].replace('\\\\','/'))\n image_raw=cv2.imread(img_path)\n image=image_raw[:,:,::-1].copy()\n y_label=torch.tensor(int(self.annotations[index]['category_id']))\n if self.transform:\n image=self.transform(image)\n return (image,y_label)", "_____no_output_____" ], [ "transform_cifar = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])\n\ntrain_dataset = CIFAR50(\"/content/gdrive/MyDrive/CIFAR50/cifar50_train.json\", \"/content/gdrive/MyDrive/CIFAR50/images\", transform = transform_cifar, train = True)\ntest_dataset = CIFAR50(\"/content/gdrive/MyDrive/CIFAR50/cifar50_test.json\", \"/content/gdrive/MyDrive/CIFAR50/images\", transform = transform_cifar, train = False)\n\nprint(\"Print the training dataset before augmentation:\\n \", train_dataset)\nprint(\"Print the testing dataset:\\n \", test_dataset)", "Print the training dataset before augmentation:\n <__main__.CIFAR50 object at 0x7f8e61787bd0>\nPrint the testing dataset:\n <__main__.CIFAR50 object at 0x7f8e61787f10>\n" ], [ "%cd '/content/gdrive/MyDrive'", "/content/gdrive/MyDrive\n" ], [ "import json\njson_dump=\"\"\nwith open (\"/content/gdrive/MyDrive/CIFAR50/cifar50_train.json\", mode='r') as f:\n json_dump=f.read()\njson_fomatted=json.loads(json_dump)\njson_fomatted['annotations'][:5]", "_____no_output_____" ], [ "import matplotlib.pyplot as plt\nimport cv2\n\njson_fomatted['annotations'][0]\nfig,ax=plt.subplots(1,5)\nfor i in range(1,6):\n img_path=json_fomatted['annotations'][i]['fpath'].replace('\\\\','/')\n print(img_path)\n img=cv2.imread(\"/content/gdrive/MyDrive/\"+img_path)\n ax[i-1].imshow(img[:,:,::-1])\n ax[i-1].text(y=-8,x=0,s=json_fomatted['annotations'][i]['category'],color=\"g\")\n ax[i-1].axis('off')\n fig.show()", "data/cifar50/images/telephone/train_1.jpg\ndata/cifar50/images/train/train_2.jpg\ndata/cifar50/images/cup/train_3.jpg\ndata/cifar50/images/willow_tree/train_4.jpg\ndata/cifar50/images/sunflower/train_5.jpg\n" ], [ "train_loader = 
torch.utils.data.DataLoader(train_dataset, batch_size= 128, shuffle=True, sampler=None,\n batch_sampler=None, num_workers=0, collate_fn=None,\n pin_memory=False, drop_last=False, timeout=0,\n worker_init_fn=None, prefetch_factor=2,\n persistent_workers=False)\n\ntest_loader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size,\n shuffle=True, num_workers=workers)\n\nprint(train_loader)\nprint(test_loader)", "<torch.utils.data.dataloader.DataLoader object at 0x7f8e5f339650>\n<torch.utils.data.dataloader.DataLoader object at 0x7f8e5f339ad0>\n" ], [ "# Decide which device we want to run on\ndevice = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")", "_____no_output_____" ], [ "iterator=iter(train_loader)\nimages, label=iterator.next()\nplt.imshow(images[0].permute(1,2,0))\n# plt.title(label_mapping[int(label[0])])\nplt.title(int(label[0]))", "Clipping input data to the valid range for imshow with RGB data ([0..1] for floats or [0..255] for integers).\n" ], [ "for X, y in train_loader:\n print(\"Shape of X [N, C, H, W]: \", X.shape)\n print(\"Shape of y: \", y.shape, y.dtype)\n break", "Shape of X [N, C, H, W]: torch.Size([128, 3, 32, 32])\nShape of y: torch.Size([128]) torch.int64\n" ] ], [ [ "## Train CNNs.\n\n#### Use the CNN in HW2 to train the model on the balanced CIFAR50 dataset.\n\nTrain the CNN on the balanced CIFAR50 training set. Evaluate and report the classification accuracies on the testing set. \n\nNote: You can use any network configurations you implemented in HW2.", "_____no_output_____" ] ], [ [ "import torch.nn as nn\nimport torch.nn.functional as F\n\nclass Net(nn.Module):\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 6, 5)\n self.pool = nn.MaxPool2d(2, 2)\n self.conv2 = nn.Conv2d(6, 16, 5)\n self.fc1 = nn.Linear(400, 120)\n self.fc2 = nn.Linear(120, 84)\n self.fc3 = nn.Linear(84, 100)\n\n def forward(self, x):\n x = self.pool(F.relu(self.conv1(x)))\n x = self.pool(F.relu(self.conv2(x)))\n x = torch.flatten(x, 1)\n x = F.relu(self.fc1(x))\n x = F.relu(self.fc2(x))\n x = self.fc3(x)\n return x\nnet = Net().to(device)", "_____no_output_____" ], [ "import torch.optim as optim\n\ncriterion = nn.CrossEntropyLoss()\noptimizer = optim.SGD(net.parameters(), lr=0.001, momentum=0.9)", "_____no_output_____" ], [ "for epoch in range(20): \n running_loss = 0.0\n for i, data in enumerate(train_loader, 0):\n inputs, labels = data\n inputs, labels = inputs.to(device), labels.to(device)\n optimizer.zero_grad()\n outputs = net(inputs)\n outputs = outputs.to(device)\n loss = criterion(outputs, labels)\n loss.backward()\n optimizer.step()\n running_loss += loss.item()\n if i % 2000 == 1999: # print every 2000 mini-batches\n print('[%d, %5d] loss: %.3f' % (epoch + 1, i + 1, running_loss/2000))\n running_loss = 0.0\nprint('Finished Training')", "Finished Training\n" ], [ "n_correct=0\nn_samples=0\nwith torch.no_grad():\n for data in test_loader:\n inputs_test, labels_test = data\n inputs_test, labels_test = inputs_test.to(device), labels_test.to(device)\n outputs_test = net(inputs_test)\n outputs_test = outputs_test.to(device)\n _, predicted = torch.max(outputs_test.data, 1)\n n_samples+=labels_test.size(0)\n n_correct+=(predicted==labels_test).sum().item() \nprint('Test accuracy:', (n_correct/n_samples*100)) ", "Test accuracy: 19.040000000000003\n" ] ], [ [ "#### Use the same CNN in HW2 to train the model on the imbalanced CIFAR50 dataset.\n\nTrain the CNN on the imbalanced CIFAR50 training set. 
Evaluate and report the classification accuracies on the testing set. \n", "_____no_output_____" ] ], [ [ "# Root directory for dataset\ndataroot = \"/content/gdrive/MyDrive/data/cifar50_imbalance_0.02\"\n\n# Number of workers for dataloader\nworkers = 2\n\n# Batch size during training\nbatch_size = 128\n\n# Spatial size of training images. All images will be resized to this\n# size using a transformer.\nimage_size = 64\n\n# Learning rate for optimizers\nlr = 0.0002\n\n# Beta1 hyperparam for Adam optimizers\nbeta1 = 0.5", "_____no_output_____" ], [ "directory = \"/content/gdrive/MyDrive/data/cifar50_imbalance_0.02/images\"\n\nclasses = [ f.name for f in os.scandir(directory) if f.is_dir() ]\nprint(classes)", "['apple', 'baby', 'beaver', 'bee', 'bicycle', 'bowl', 'bridge', 'butterfly', 'can', 'caterpillar', 'chair', 'clock', 'cockroach', 'crab', 'cup', 'dolphin', 'flatfish', 'fox', 'hamster', 'kangaroo', 'lamp', 'leopard', 'lizard', 'man', 'motorcycle', 'mouse', 'oak_tree', 'orchid', 'palm_tree', 'pickup_truck', 'plain', 'poppy', 'possum', 'raccoon', 'road', 'rose', 'seal', 'shrew', 'skyscraper', 'snake', 'squirrel', 'sunflower', 'table', 'telephone', 'tiger', 'train', 'tulip', 'wardrobe', 'willow_tree', 'woman']\n" ], [ "transform_cifar = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])\n\ntrain_dataset_imbalanced = CIFAR50(\"/content/gdrive/MyDrive/data/cifar50_imbalance_0.02/cifar50_imbalance_0.02_train.json\", \"/content/gdrive/MyDrive/data/cifar50_imbalance_0.02/images\", transform = transform_cifar, train = True)\ntest_dataset_imbalanced = CIFAR50(\"/content/gdrive/MyDrive/data/cifar50_imbalance_0.02/cifar50_imbalance_0.02_test.json\", \"/content/gdrive/MyDrive/data/cifar50_imbalance_0.02/images\", transform = transform_cifar, train = False)\n\nprint(\"Print the training dataset before augmentation:\\n \", train_dataset_imbalanced)\nprint(\"Print the testing dataset:\\n \", test_dataset_imbalanced)", "Print the training dataset before augmentation:\n <__main__.CIFAR50 object at 0x7f8e60108e10>\nPrint the testing dataset:\n <__main__.CIFAR50 object at 0x7f8e60108f90>\n" ], [ "import json\njson_dump=\"\"\nwith open (\"/content/gdrive/MyDrive/data/cifar50_imbalance_0.02/cifar50_imbalance_0.02_train.json\", mode='r') as f:\n json_dump=f.read()\njson_fomatted=json.loads(json_dump)\njson_fomatted['annotations'][:5]", "_____no_output_____" ], [ "import matplotlib.pyplot as plt\nimport cv2\n\njson_fomatted['annotations'][0]\nfig,ax=plt.subplots(1,5)\nfor i in range(1,6):\n img_path=json_fomatted['annotations'][i]['fpath'].replace('\\\\','/')\n img=cv2.imread(\"/content/gdrive/MyDrive/\"+img_path)\n ax[i-1].imshow(img[:,:,::-1])\n ax[i-1].text(y=-8,x=0,s=json_fomatted['annotations'][i]['category'],color=\"Green\")\n ax[i-1].axis('off')\n fig.show()", "_____no_output_____" ], [ "train_loader_imbalanced = torch.utils.data.DataLoader(train_dataset_imbalanced, batch_size= 128, shuffle=True, sampler=None,\n batch_sampler=None, num_workers=0, collate_fn=None,\n pin_memory=False, drop_last=False, timeout=0,\n worker_init_fn=None, prefetch_factor=2,\n persistent_workers=False)\n\ntest_loader_imbalanced = torch.utils.data.DataLoader(test_dataset_imbalanced, batch_size= 128, shuffle=True, sampler=None,\n batch_sampler=None, num_workers=0, collate_fn=None,\n pin_memory=False, drop_last=False, timeout=0,\n worker_init_fn=None, prefetch_factor=2,\n persistent_workers=False)\n\nprint(train_loader_imbalanced)\nprint(test_loader_imbalanced)", 
"<torch.utils.data.dataloader.DataLoader object at 0x7f8e5e81b890>\n<torch.utils.data.dataloader.DataLoader object at 0x7f8e5e81ba90>\n" ], [ "iterator=iter(train_loader_imbalanced)\nimages, label=iterator.next()\nplt.imshow(images[0].permute(1,2,0))\n# plt.title(label_mapping[int(label[0])])\nplt.title(int(label[0]))", "Clipping input data to the valid range for imshow with RGB data ([0..1] for floats or [0..255] for integers).\n" ], [ "for X, y in train_loader_imbalanced:\n print(\"Shape of X [N, C, H, W]: \", X.shape)\n print(\"Shape of y: \", y.shape, y.dtype)\n break", "Shape of X [N, C, H, W]: torch.Size([128, 3, 32, 32])\nShape of y: torch.Size([128]) torch.int64\n" ], [ "for epoch in range(20): \n running_loss = 0.0\n for i, data in enumerate(train_loader_imbalanced, 0):\n inputs, labels = data\n inputs, labels = inputs.to(device), labels.to(device)\n optimizer.zero_grad()\n outputs = net(inputs)\n outputs = outputs.to(device)\n loss = criterion(outputs, labels)\n loss.backward()\n optimizer.step()\n running_loss += loss.item()\n if i % 2000 == 1999: # print every 2000 mini-batches\n print('[%d, %5d] loss: %.3f' % (epoch + 1, i + 1, running_loss/2000))\n running_loss = 0.0\nprint('Finished Training')", "Finished Training\n" ], [ "n_correct=0\nn_samples=0\nwith torch.no_grad():\n for data in test_loader_imbalanced:\n inputs_test, labels_test = data\n inputs_test, labels_test = inputs_test.to(device), labels_test.to(device)\n outputs_test = net(inputs_test)\n outputs_test = outputs_test.to(device)\n _, predicted = torch.max(outputs_test.data, 1)\n n_samples+=labels_test.size(0)\n n_correct+=(predicted==labels_test).sum().item() \nprint('Test accuracy:', (n_correct/n_samples*100)) ", "Test accuracy: 15.959999999999999\n" ] ], [ [ "## Implement Tricks for LTR\n\nBefore starting this question, please read the paper for this homework: Bag of tricks for long-tailed visual recognition with deep convolutional neural networks.\n\nAccording to this paper, select at least **three** tricks to implement on the imbalanced CIFAR50 training. 
", "_____no_output_____" ] ], [ [ "# TODO: trick 1 implementation\nimport glob\nimport pickle\nclass CIFAR_Dataset(Dataset):\n data_dir = \"/content/gdrive/MyDrive/data/cifar50_imbalance_0.02\"\n train = True\n transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])\n \n path = \"/content/gdrive/MyDrive/data/cifar50_imbalance_0.02/images/*/train*.jpg\"\n \n\n def __init__(self, data_dir = \"/content/gdrive/MyDrive/data/cifar50_imbalance_0.02/\", train = True, transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])):\n self.data_dir = data_dir\n self.train = train\n self.transform = transform\n self.data = []\n self.targets = []\n path = \"/content/gdrive/MyDrive/data/cifar50_imbalance_0.02/images/*/train*.jpg\"\n # Loading all the data depending on whether the dataset is training or testing\n if self.train:\n for filename in glob.glob(path):\n for i in range(len(classes)):\n #with open(data_dir + 'images/' + classes[i] + '/train_' + '.jpg' , 'rb') as f:\n with open(filename , 'rb') as f:\n #entry = pickle.load(f, encoding='latin1')\n \n self.data.append(entry['data'])\n self.targets.extend(entry['labels'])\n else:\n with open(data_dir + 'test_batch', 'rb') as f:\n entry = pickle.load(f, encoding='latin1')\n self.data.append(entry['data'])\n self.targets.extend(entry['labels'])\n\n # Reshape it and turn it into the HWC format which PyTorch takes in the images\n # Original CIFAR format can be seen via its official page\n self.data = np.vstack(self.data).reshape(-1, 3, 32, 32)\n self.data = self.data.transpose((0, 2, 3, 1))\n\n def __len__(self):\n return len(self.data)\n\n def __getitem__(self, idx):\n\n # Create a one hot label\n label = torch.zeros(10)\n label[self.targets[idx]] = 1.\n\n # Transform the image by converting to tensor and normalizing it\n if self.transform:\n image = transform(self.data[idx])\n\n # If data is for training, perform mixup, only perform mixup roughly on 1 for every 5 images\n if self.train and idx > 0 and idx%5 == 0:\n\n # Choose another image/label randomly\n mixup_idx = random.randint(0, len(self.data)-1)\n mixup_label = torch.zeros(10)\n label[self.targets[mixup_idx]] = 1.\n if self.transform:\n mixup_image = transform(self.data[mixup_idx])\n\n # Select a random number from the given beta distribution\n # Mixup the images accordingly\n alpha = 0.2\n lam = np.random.beta(alpha, alpha)\n image = lam * image + (1 - lam) * mixup_image\n label = lam * label + (1 - lam) * mixup_label\n\n return image, label", "_____no_output_____" ], [ "net = Net().to(device)\noptimizer = torch.optim.Adam(net.parameters(), lr=0.0002)\ncriterion = nn.CrossEntropyLoss()\nbest_Acc = 0\n\nfor epoch in range(20):\n net.train()\n # We train and visualize the loss every 100 iterations\n for idx, (imgs, labels) in enumerate(train_loader_imbalanced):\n imgs = imgs.to(device)\n labels = labels.to(device)\n preds = net(imgs)\n loss = criterion(preds, labels)\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n if idx%100 == 0:\n print(\"Epoch {} Iteration {}, Current Loss: {}\".format(epoch, idx, loss))\n\n # We evaluate the network after every epoch based on test set accuracy\n net.eval()\n with torch.no_grad():\n total = 0\n numCorrect = 0\n for (imgs, labels) in test_loader_imbalanced:\n imgs = imgs.to(device)\n labels = labels.to(device)\n preds = net(imgs)\n numCorrect += (torch.argmax(preds, dim=-1) == torch.argmax(labels, dim=-1)).float().sum()\n total += 
len(imgs)\n acc = (numCorrect/total)*100\n print(\"Current image classification accuracy at epoch {}: {}\".format(epoch, acc))\n if acc > best_Acc:\n best_Acc = acc", "Epoch 0 Iteration 0, Current Loss: 4.614141464233398\nCurrent image classification accuracy at epoch 0: 2.359999895095825\nEpoch 1 Iteration 0, Current Loss: 3.7443413734436035\nCurrent image classification accuracy at epoch 1: 1.9399998188018799\nEpoch 2 Iteration 0, Current Loss: 3.255664110183716\nCurrent image classification accuracy at epoch 2: 1.0\nEpoch 3 Iteration 0, Current Loss: 3.4127871990203857\nCurrent image classification accuracy at epoch 3: 2.1399998664855957\nEpoch 4 Iteration 0, Current Loss: 3.291637420654297\nCurrent image classification accuracy at epoch 4: 1.7799999713897705\nEpoch 5 Iteration 0, Current Loss: 3.1298210620880127\nCurrent image classification accuracy at epoch 5: 1.0\nEpoch 6 Iteration 0, Current Loss: 2.975938081741333\nCurrent image classification accuracy at epoch 6: 0.9599999785423279\nEpoch 7 Iteration 0, Current Loss: 2.9351491928100586\nCurrent image classification accuracy at epoch 7: 3.5999999046325684\nEpoch 8 Iteration 0, Current Loss: 2.9620256423950195\nCurrent image classification accuracy at epoch 8: 2.499999761581421\nEpoch 9 Iteration 0, Current Loss: 3.0351388454437256\nCurrent image classification accuracy at epoch 9: 1.4800000190734863\nEpoch 10 Iteration 0, Current Loss: 2.956851005554199\nCurrent image classification accuracy at epoch 10: 2.5999999046325684\nEpoch 11 Iteration 0, Current Loss: 3.0027921199798584\nCurrent image classification accuracy at epoch 11: 2.1599998474121094\nEpoch 12 Iteration 0, Current Loss: 2.8152809143066406\nCurrent image classification accuracy at epoch 12: 1.5199999809265137\nEpoch 13 Iteration 0, Current Loss: 2.8033876419067383\nCurrent image classification accuracy at epoch 13: 1.8600000143051147\nEpoch 14 Iteration 0, Current Loss: 2.7522952556610107\nCurrent image classification accuracy at epoch 14: 1.6999999284744263\nEpoch 15 Iteration 0, Current Loss: 2.6533875465393066\nCurrent image classification accuracy at epoch 15: 1.3599998950958252\nEpoch 16 Iteration 0, Current Loss: 2.923511505126953\nCurrent image classification accuracy at epoch 16: 1.2599999904632568\nEpoch 17 Iteration 0, Current Loss: 2.650336980819702\nCurrent image classification accuracy at epoch 17: 2.6599998474121094\nEpoch 18 Iteration 0, Current Loss: 2.5230395793914795\nCurrent image classification accuracy at epoch 18: 1.9399998188018799\nEpoch 19 Iteration 0, Current Loss: 2.8459811210632324\nCurrent image classification accuracy at epoch 19: 2.499999761581421\n" ], [ "# TODO: trick 2 implementation\ndef make_weights_for_balanced_classes(images, nclasses): \n count = [0] * nclasses \n for item in images: \n count[item[1]] += 1 \n weight_per_class = [0.] 
* nclasses \n N = float(sum(count)) \n for i in range(nclasses): \n weight_per_class[i] = N/float(count[i]) \n weight = [0] * len(images) \n for idx, val in enumerate(images): \n weight[idx] = weight_per_class[val[1]] \n return weight", "_____no_output_____" ], [ "net = Net().to(device)\noptimizer = torch.optim.Adam(net.parameters(), lr=0.0002)\ncriterion = nn.CrossEntropyLoss()\nbest_Acc = 0\n\nfor epoch in range(20):\n net.train()\n # We train and visualize the loss every 100 iterations\n for idx, (imgs, labels) in enumerate(train_loader_imbalanced):\n imgs = imgs.to(device)\n labels = labels.to(device)\n preds = net(imgs)\n loss = criterion(preds, labels)\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n if idx%100 == 0:\n print(\"Epoch {} Iteration {}, Current Loss: {}\".format(epoch, idx, loss))\n\n # We evaluate the network after every epoch based on test set accuracy\n net.eval()\n with torch.no_grad():\n total = 0\n numCorrect = 0\n for (imgs, labels) in test_loader_imbalanced:\n imgs = imgs.to(device)\n labels = labels.to(device)\n preds = net(imgs)\n numCorrect += (torch.argmax(preds, dim=-1) == torch.argmax(labels, dim=-1)).float().sum()\n total += len(imgs)\n acc = (numCorrect/total)*100\n print(\"Current image classification accuracy at epoch {}: {}\".format(epoch, acc))\n if acc > best_Acc:\n best_Acc = acc", "_____no_output_____" ], [ "# TODO: trick 3 implementation\nclass_weight=[]\n\nfor root, subdir,files in os.walk(directory):\n if len(files)>0:\n class_weight.append(1/len(files))\nclass_weight = torch.FloatTensor(class_weight).to(device)\n\nloss_fn = nn.CrossEntropyLoss(weight=class_weight)", "_____no_output_____" ], [ "net = Net().to(device)\noptimizer = torch.optim.Adam(net.parameters(), lr=0.0002)\ncriterion = nn.CrossEntropyLoss()\nbest_Acc = 0\n\nfor epoch in range(20):\n net.train()\n # We train and visualize the loss every 100 iterations\n for idx, (imgs, labels) in enumerate(train_loader_imbalanced):\n imgs = imgs.to(device)\n labels = labels.to(device)\n preds = net(imgs)\n loss = criterion(preds, labels)\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n if idx%100 == 0:\n print(\"Epoch {} Iteration {}, Current Loss: {}\".format(epoch, idx, loss))\n\n # We evaluate the network after every epoch based on test set accuracy\n net.eval()\n with torch.no_grad():\n total = 0\n numCorrect = 0\n for (imgs, labels) in test_loader_imbalanced:\n imgs = imgs.to(device)\n labels = labels.to(device)\n preds = net(imgs)\n numCorrect += (torch.argmax(preds, dim=-1) == torch.argmax(labels, dim=-1)).float().sum()\n total += len(imgs)\n acc = (numCorrect/total)*100\n print(\"Current image classification accuracy at epoch {}: {}\".format(epoch, acc))\n if acc > best_Acc:\n best_Acc = acc", "_____no_output_____" ] ], [ [ "Evaluate and report the classification performance on CIFAR50 testing set.", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ] ]
cbfc1e25e7444bc9a4d713b9931cde69700eed9c
21,153
ipynb
Jupyter Notebook
week03_convnets/seminar_pytorch.ipynb
ftju/Practical_DL
b45d53949d5cb08f7d604fe34c83556a1c2cc65b
[ "MIT" ]
1
2019-04-03T14:00:14.000Z
2019-04-03T14:00:14.000Z
week03_convnets/seminar_pytorch.ipynb
ftju/Practical_DL
b45d53949d5cb08f7d604fe34c83556a1c2cc65b
[ "MIT" ]
null
null
null
week03_convnets/seminar_pytorch.ipynb
ftju/Practical_DL
b45d53949d5cb08f7d604fe34c83556a1c2cc65b
[ "MIT" ]
1
2019-05-01T20:17:04.000Z
2019-05-01T20:17:04.000Z
32.493088
371
0.563703
[ [ [ "# Deep learning for computer vision\n\n\nThis notebook will teach you to build and train convolutional networks for image recognition. Brace yourselves.", "_____no_output_____" ], [ "# CIFAR dataset\nThis week, we shall focus on the image recognition problem on cifar10 dataset\n* 60k images of shape 3x32x32\n* 10 different classes: planes, dogs, cats, trucks, etc.\n\n<img src=\"cifar10.jpg\" style=\"width:80%\">", "_____no_output_____" ] ], [ [ "import numpy as np\nfrom cifar import load_cifar10\nX_train,y_train,X_val,y_val,X_test,y_test = load_cifar10(\"cifar_data\")\n\nclass_names = np.array(['airplane','automobile ','bird ','cat ','deer ','dog ','frog ','horse ','ship ','truck'])\n\nprint (X_train.shape,y_train.shape)", "_____no_output_____" ], [ "\nimport matplotlib.pyplot as plt\n%matplotlib inline\n\nplt.figure(figsize=[12,10])\nfor i in range(12):\n plt.subplot(3,4,i+1)\n plt.xlabel(class_names[y_train[i]])\n plt.imshow(np.transpose(X_train[i],[1,2,0]))", "_____no_output_____" ] ], [ [ "# Building a network\n\nSimple neural networks with layers applied on top of one another can be implemented as `torch.nn.Sequential` - just add a list of pre-built modules and let it train.", "_____no_output_____" ] ], [ [ "import torch, torch.nn as nn\nimport torch.nn.functional as F\nfrom torch.autograd import Variable\n\n# a special module that converts [batch, channel, w, h] to [batch, units]\nclass Flatten(nn.Module):\n def forward(self, input):\n return input.view(input.size(0), -1)", "_____no_output_____" ] ], [ [ "Let's start with a dense network for our baseline:", "_____no_output_____" ] ], [ [ "model = nn.Sequential()\n\n# reshape from \"images\" to flat vectors\nmodel.add_module('flatten', Flatten())\n\n# dense \"head\"\nmodel.add_module('dense1', nn.Linear(3 * 32 * 32, 64))\nmodel.add_module('dense1_relu', nn.ReLU())\nmodel.add_module('dense2_logits', nn.Linear(64, 10)) # logits for 10 classes", "_____no_output_____" ] ], [ [ "As in our basic tutorial, we train our model with negative log-likelihood aka crossentropy.", "_____no_output_____" ] ], [ [ "def compute_loss(X_batch, y_batch):\n X_batch = Variable(torch.FloatTensor(X_batch))\n y_batch = Variable(torch.LongTensor(y_batch))\n logits = model(X_batch)\n return F.cross_entropy(logits, y_batch).mean()", "_____no_output_____" ], [ "# example\ncompute_loss(X_train[:5], y_train[:5])", "_____no_output_____" ] ], [ [ "### Training on minibatches\n* We got 40k images, that's way too many for a full-batch SGD. 
Let's train on minibatches instead\n* Below is a function that splits the training sample into minibatches", "_____no_output_____" ] ], [ [ "# An auxilary function that returns mini-batches for neural network training\ndef iterate_minibatches(X, y, batchsize):\n indices = np.random.permutation(np.arange(len(X)))\n for start in range(0, len(indices), batchsize):\n ix = indices[start: start + batchsize]\n yield X[ix], y[ix]", "_____no_output_____" ], [ "opt = torch.optim.SGD(model.parameters(), lr=0.01)\n\ntrain_loss = []\nval_accuracy = []", "_____no_output_____" ], [ "import time\nnum_epochs = 100 # total amount of full passes over training data\nbatch_size = 50 # number of samples processed in one SGD iteration\n\nfor epoch in range(num_epochs):\n # In each epoch, we do a full pass over the training data:\n start_time = time.time()\n model.train(True) # enable dropout / batch_norm training behavior\n for X_batch, y_batch in iterate_minibatches(X_train, y_train, batch_size):\n # train on batch\n loss = compute_loss(X_batch, y_batch)\n loss.backward()\n opt.step()\n opt.zero_grad()\n train_loss.append(loss.data.numpy())\n \n # And a full pass over the validation data:\n model.train(False) # disable dropout / use averages for batch_norm\n for X_batch, y_batch in iterate_minibatches(X_val, y_val, batch_size):\n logits = model(Variable(torch.FloatTensor(X_batch)))\n y_pred = logits.max(1)[1].data.numpy()\n val_accuracy.append(np.mean(y_batch == y_pred))\n\n \n # Then we print the results for this epoch:\n print(\"Epoch {} of {} took {:.3f}s\".format(\n epoch + 1, num_epochs, time.time() - start_time))\n print(\" training loss (in-iteration): \\t{:.6f}\".format(\n np.mean(train_loss[-len(X_train) // batch_size :])))\n print(\" validation accuracy: \\t\\t\\t{:.2f} %\".format(\n np.mean(val_accuracy[-len(X_val) // batch_size :]) * 100))", "_____no_output_____" ] ], [ [ "Don't wait for full 100 epochs. You can interrupt training after 5-20 epochs once validation accuracy stops going up.\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n### Final test", "_____no_output_____" ] ], [ [ "model.train(False) # disable dropout / use averages for batch_norm\ntest_batch_acc = []\nfor X_batch, y_batch in iterate_minibatches(X_test, y_test, 500):\n logits = model(Variable(torch.FloatTensor(X_batch)))\n y_pred = logits.max(1)[1].data.numpy()\n test_batch_acc.append(np.mean(y_batch == y_pred))\n\ntest_accuracy = np.mean(test_batch_acc)\n \nprint(\"Final results:\")\nprint(\" test accuracy:\\t\\t{:.2f} %\".format(\n test_accuracy * 100))\n\nif test_accuracy * 100 > 95:\n print(\"Double-check, than consider applying for NIPS'17. SRSly.\")\nelif test_accuracy * 100 > 90:\n print(\"U'r freakin' amazin'!\")\nelif test_accuracy * 100 > 80:\n print(\"Achievement unlocked: 110lvl Warlock!\")\nelif test_accuracy * 100 > 70:\n print(\"Achievement unlocked: 80lvl Warlock!\")\nelif test_accuracy * 100 > 60:\n print(\"Achievement unlocked: 70lvl Warlock!\")\nelif test_accuracy * 100 > 50:\n print(\"Achievement unlocked: 60lvl Warlock!\")\nelse:\n print(\"We need more magic! 
Follow instructons below\")", "_____no_output_____" ] ], [ [ "## Task I: small convolution net\n### First step\n\nLet's create a mini-convolutional network with roughly such architecture:\n* Input layer\n* 3x3 convolution with 10 filters and _ReLU_ activation\n* 2x2 pooling (or set previous convolution stride to 3)\n* Flatten\n* Dense layer with 100 neurons and _ReLU_ activation\n* 10% dropout\n* Output dense layer.\n\n\n__Convolutional layers__ in torch are just like all other layers, but with a specific set of parameters:\n\n__`...`__\n\n__`model.add_module('conv1', nn.Conv2d(in_channels=3, out_channels=10, kernel_size=3)) # convolution`__\n\n__`model.add_module('pool1', nn.MaxPool2d(2)) # max pooling 2x2`__\n\n__`...`__\n\n\nOnce you're done (and compute_loss no longer raises errors), train it with __Adam__ optimizer with default params (feel free to modify the code above).\n\nIf everything is right, you should get at least __50%__ validation accuracy.", "_____no_output_____" ], [ "```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n__Hint:__ If you don't want to compute shapes by hand, just plug in any shape (e.g. 1 unit) and run compute_loss. You will see something like this:\n\n__`RuntimeError: size mismatch, m1: [5 x 1960], m2: [1 x 64] at /some/long/path/to/torch/operation`__\n\nSee the __1960__ there? That's your actual input shape.\n\n## Task 2: adding normalization\n\n* Add batch norm (with default params) between convolution and ReLU\n * nn.BatchNorm*d (1d for dense, 2d for conv)\n * usually better to put them after linear/conv but before nonlinearity\n* Re-train the network with the same optimizer, it should get at least 60% validation accuracy at peak.\n\n", "_____no_output_____" ], [ "\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n## Task 3: Data Augmentation\n\nThere's a powerful torch tool for image preprocessing useful to do data preprocessing and augmentation.\n\nHere's how it works: we define a pipeline that\n* makes random crops of data (augmentation)\n* randomly flips image horizontally (augmentation)\n* then normalizes it (preprocessing)", "_____no_output_____" ] ], [ [ "from torchvision import transforms\nmeans = np.array((0.4914, 0.4822, 0.4465))\nstds = np.array((0.2023, 0.1994, 0.2010))\n\ntransform_augment = transforms.Compose([\n transforms.RandomCrop(32, padding=4),\n transforms.RandomRotation([-30, 30]),\n transforms.RandomHorizontalFlip(),\n transforms.ToTensor(),\n transforms.Normalize(means, stds),\n])", "_____no_output_____" ], [ "from torchvision.datasets import CIFAR10\ntrain_loader = CIFAR10(\"./cifar_data/\", train=True, transform=transform_augment)\n\ntrain_batch_gen = torch.utils.data.DataLoader(train_loader, \n batch_size=32,\n shuffle=True,\n num_workers=1)", "_____no_output_____" ], [ "\nfor (x_batch, y_batch) in train_batch_gen:\n \n print('X:', type(x_batch), x_batch.shape)\n print('y:', type(y_batch), y_batch.shape)\n \n for i, img in enumerate(x_batch.numpy()[:8]):\n plt.subplot(2, 4, i+1)\n plt.imshow(img.transpose([1,2,0]) * stds + means )\n \n \n raise NotImplementedError(\"Plese use this code in your training loop\")\n # TODO use this in your training loop", "_____no_output_____" ] ], [ [ "When testing, we don't need random crops, just normalize with same statistics.", "_____no_output_____" ] ], [ [ "transform_test = transforms.Compose([\n transforms.ToTensor(),\n transforms.Normalize(means, stds),\n])\n\ntest_loader = <YOUR CODE>", "_____no_output_____" ] ], [ [ "# 
Homework 2.2: The Quest For A Better Network\n\nIn this assignment you will build a monster network to solve CIFAR10 image classification.\n\nThis notebook is intended as a sequel to seminar 3, please give it a try if you haven't done so yet.", "_____no_output_____" ], [ "(please read it at least diagonally)\n\n* The ultimate quest is to create a network that has as high __accuracy__ as you can push it.\n* There is a __mini-report__ at the end that you will have to fill in. We recommend reading it first and filling it while you iterate.\n \n## Grading\n* starting at zero points\n* +20% for describing your iteration path in a report below.\n* +20% for building a network that gets above 20% accuracy\n* +10% for beating each of these milestones on __TEST__ dataset:\n * 50% (50% points)\n * 60% (60% points)\n * 65% (70% points)\n * 70% (80% points)\n * 75% (90% points)\n * 80% (full points)\n \n## Restrictions\n* Please do NOT use pre-trained networks for this assignment until you reach 80%.\n * In other words, base milestones must be beaten without pre-trained nets (and such net must be present in the e-mail). After that, you can use whatever you want.\n* you __can__ use validation data for training, but you __can't'__ do anything with test data apart from running the evaluation procedure.\n\n## Tips on what can be done:\n\n\n * __Network size__\n * MOAR neurons, \n * MOAR layers, ([torch.nn docs](http://pytorch.org/docs/master/nn.html))\n\n * Nonlinearities in the hidden layers\n * tanh, relu, leaky relu, etc\n * Larger networks may take more epochs to train, so don't discard your net just because it could didn't beat the baseline in 5 epochs.\n\n * Ph'nglui mglw'nafh Cthulhu R'lyeh wgah'nagl fhtagn!\n\n\n### The main rule of prototyping: one change at a time\n * By now you probably have several ideas on what to change. By all means, try them out! But there's a catch: __never test several new things at once__.\n\n\n### Optimization\n * Training for 100 epochs regardless of anything is probably a bad idea.\n * Some networks converge over 5 epochs, others - over 500.\n * Way to go: stop when validation score is 10 iterations past maximum\n * You should certainly use adaptive optimizers\n * rmsprop, nesterov_momentum, adam, adagrad and so on.\n * Converge faster and sometimes reach better optima\n * It might make sense to tweak learning rate/momentum, other learning parameters, batch size and number of epochs\n * __BatchNormalization__ (nn.BatchNorm2d) for the win!\n * Sometimes more batch normalization is better.\n * __Regularize__ to prevent overfitting\n * Add some L2 weight norm to the loss function, PyTorch will do the rest\n * Can be done manually or like [this](https://discuss.pytorch.org/t/simple-l2-regularization/139/2).\n * Dropout (`nn.Dropout`) - to prevent overfitting\n * Don't overdo it. Check if it actually makes your network better\n \n### Convolution architectures\n * This task __can__ be solved by a sequence of convolutions and poolings with batch_norm and ReLU seasoning, but you shouldn't necessarily stop there.\n * [Inception family](https://hacktilldawn.com/2016/09/25/inception-modules-explained-and-implemented/), [ResNet family](https://towardsdatascience.com/an-overview-of-resnet-and-its-variants-5281e2f56035?gi=9018057983ca), [Densely-connected convolutions (exotic)](https://arxiv.org/abs/1608.06993), [Capsule networks (exotic)](https://arxiv.org/abs/1710.09829)\n * Please do try a few simple architectures before you go for resnet-152.\n * Warning! 
Training convolutional networks can take long without GPU. That's okay.\n   * If you are CPU-only, we still recommend that you try a simple convolutional architecture\n     * a perfect option is if you can set it up to run at nighttime and check it in the morning.\n   * Make reasonable layer size estimates. A 128-neuron first convolution is likely an overkill.\n   * __To reduce computation__ time by a factor in exchange for some accuracy drop, try using __stride__ parameter. A stride=2 convolution should take roughly 1/4 of the default (stride=1) one.\n \n \n### Data augmentation\n * getting a 5x larger dataset for free is a great deal\n * Zoom-in+slice = move\n * Rotate+zoom(to remove black stripes)\n * Add noise (gaussian or bernoulli)\n * Simple way to do that (if you have PIL/Image): \n * ```from scipy.misc import imrotate,imresize```\n * and a few slicing operations\n * Other cool libraries: cv2, skimage, PIL/Pillow\n * A more advanced way is to use torchvision transforms:\n ```\n transform_train = transforms.Compose([\n transforms.RandomCrop(32, padding=4),\n transforms.RandomHorizontalFlip(),\n transforms.ToTensor(),\n transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),\n ])\n trainset = torchvision.datasets.CIFAR10(root=path_to_cifar_like_in_seminar, train=True, download=True, transform=transform_train)\n trainloader = torch.utils.data.DataLoader(trainset, batch_size=128, shuffle=True, num_workers=2)\n\n ```\n * Or use this tool from Keras (requires theano/tensorflow): [tutorial](https://blog.keras.io/building-powerful-image-classification-models-using-very-little-data.html), [docs](https://keras.io/preprocessing/image/)\n * Stay realistic. There's usually no point in flipping dogs upside down as that is not the way you usually see them.\n \n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n```\n\n\n \nThere is a template for your solution below that you can opt to use or throw away and write it your way.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ] ]
cbfc450237bc94fe964f4e9fca42b2761e3adb37
562,739
ipynb
Jupyter Notebook
ImageBasedSearchEngine.ipynb
pulkit10251/Project-5-IMAGE_BASED_SEARCH_ENGINE
015b912225274debeaa3b6f5f0cdc2db07b58897
[ "MIT" ]
null
null
null
ImageBasedSearchEngine.ipynb
pulkit10251/Project-5-IMAGE_BASED_SEARCH_ENGINE
015b912225274debeaa3b6f5f0cdc2db07b58897
[ "MIT" ]
null
null
null
ImageBasedSearchEngine.ipynb
pulkit10251/Project-5-IMAGE_BASED_SEARCH_ENGINE
015b912225274debeaa3b6f5f0cdc2db07b58897
[ "MIT" ]
null
null
null
2,259.995984
557,520
0.961634
[ [ [ "# IMAGE BASED SEARCH ENGINE", "_____no_output_____" ] ], [ [ "from sklearn.metrics.pairwise import cosine_similarity\nimport pickle\nfrom keras.preprocessing import image\nfrom tensorflow.keras.applications.resnet50 import ResNet50,preprocess_input\nfrom tensorflow.keras.models import Model\nimport numpy as np\nfrom collections import Counter\nfrom gensim.matutils import softcossim\nimport matplotlib.pyplot as plt", "Using TensorFlow backend.\n" ], [ "# OPENING THE VECTOR FILE\nvector_file=open(\"vector_dict.pkl\",\"rb\")\nvector_dict=pickle.load(vector_file)", "_____no_output_____" ], [ "# initializing the model\nmodel=ResNet50(weights='imagenet',input_shape=(224,224,3))\nnew_model=Model(model.input,model.layers[-2].output)", "_____no_output_____" ], [ "# preprocess input\ndef preprocess_image(img):\n img=image.load_img(img,target_size=(224,224,3))\n img=image.img_to_array(img)\n img=img.reshape((1,224,224,3))\n img=preprocess_input(img)\n return img\n# pr\ndef cos_similarity(vector,similarity,li):\n for key,value in vector_dict.items():\n val=cosine_similarity(vector,value.reshape((1,-1)))\n similarity[key]=val\n a=Counter(similarity)\n top_10=a.most_common(10)\n for i in range(len(top_10)):\n a,b=top_10[i]\n li.append(a)\n return li\n ", "_____no_output_____" ], [ "def testing(img_path):\n test_image=preprocess_image(img_path)\n test_vector=new_model.predict(test_image)\n test_vector=test_vector.reshape((1,-1))\n similarity={}\n li=[]\n top_10=cos_similarity(test_vector,similarity,li)\n fig=plt.figure(figsize=(15,15))\n columns=5\n rows=2\n for i in range(1,11):\n path=\"./combined data/\"+li[i-1]\n img=image.load_img(path)\n fig.add_subplot(rows,columns,i)\n plt.imshow(img)\n \n plt.show()\n ", "_____no_output_____" ], [ "testimage=\"./test images/faces.jpg\"\ntesting(testimage)", "_____no_output_____" ], [ "\n", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ] ]
cbfc5201bdfe7f04b6fe2a99aaadbba0ebb5672e
2,712
ipynb
Jupyter Notebook
lessons/exercises/Exercises-Intelligent-Agents-and-Active-Inference.ipynb
Yikeru/BMLIP
296f5330210d387809b2c3ce7a6847f2bd69b24c
[ "CC-BY-3.0" ]
10
2019-09-14T17:34:14.000Z
2022-01-22T18:29:11.000Z
lessons/exercises/Exercises-Intelligent-Agents-and-Active-Inference.ipynb
Yikeru/BMLIP
296f5330210d387809b2c3ce7a6847f2bd69b24c
[ "CC-BY-3.0" ]
34
2019-08-09T15:49:10.000Z
2021-11-14T10:19:28.000Z
lessons/exercises/Exercises-Intelligent-Agents-and-Active-Inference.ipynb
Yikeru/BMLIP
296f5330210d387809b2c3ce7a6847f2bd69b24c
[ "CC-BY-3.0" ]
11
2020-03-18T14:05:09.000Z
2022-01-04T14:35:32.000Z
53.176471
534
0.661504
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
cbfc5500b069c9310e3ad83ff4458bc3e37b7c1d
1,502
ipynb
Jupyter Notebook
Linear Regression/Linear Regression.ipynb
sjmiller8182/ML_Class
17f6d0ae184a113265a3e1a97c667d7b798c6f8f
[ "MIT" ]
null
null
null
Linear Regression/Linear Regression.ipynb
sjmiller8182/ML_Class
17f6d0ae184a113265a3e1a97c667d7b798c6f8f
[ "MIT" ]
null
null
null
Linear Regression/Linear Regression.ipynb
sjmiller8182/ML_Class
17f6d0ae184a113265a3e1a97c667d7b798c6f8f
[ "MIT" ]
null
null
null
25.457627
87
0.588549
[ [ [ "# TODO: Add import statements\nfrom sklearn.linear_model import LinearRegression\nimport pandas as pd\n\n# Assign the dataframe to this variable.\n# TODO: Load the data\nbmi_life_data = pd.read_csv('bmi_and_life_expectancy.txt') \n\n# Make and fit the linear regression model\n#TODO: Fit the model and Assign it to bmi_life_model\nbmi_life_model = LinearRegression()\nbmi_life_model.fit(bmi_life_data[['BMI']], bmi_life_data[['Life expectancy']])\n\n# Make a prediction using the model\n# TODO: Predict life expectancy for a BMI value of 21.07931\nlaos_life_exp = bmi_life_model.predict(21.07931)\nprint('Life expectancy for Laos:', laos_life_exp[0][0])", "Life expectancy for Laos: 60.315647164\n" ] ] ]
[ "code" ]
[ [ "code" ] ]
cbfc66e92ffb81a0a1847d55b35ddb2f2435ebe4
20,742
ipynb
Jupyter Notebook
src/prj01.ipynb
G750cloud/20MA573
6450c6a69542b9e1de37db2215cedfba0ba68621
[ "MIT" ]
null
null
null
src/prj01.ipynb
G750cloud/20MA573
6450c6a69542b9e1de37db2215cedfba0ba68621
[ "MIT" ]
null
null
null
src/prj01.ipynb
G750cloud/20MA573
6450c6a69542b9e1de37db2215cedfba0ba68621
[ "MIT" ]
null
null
null
83.637097
15,292
0.811108
[ [ [ "<a href=\"https://colab.research.google.com/github/G750cloud/20MA573/blob/master/src/prj01.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ] ], [ [ "# First Python Notebook Project", "_____no_output_____" ] ], [ [ "-The simple code to implement", "_____no_output_____" ], [ "italicized text# New Section", "_____no_output_____" ], [ "We will define a funtion of \n$\\phi(x) = \\frac{1}{\\sqrt{2\\pi}}e^{-\\frac{x^2}{2}}$", "_____no_output_____" ] ], [ [ "import numpy as np\n\ndef phi(x):\n out = 1./np.sqrt(2.*np.pi)*np.exp(-x**2/2.)\n return out", "_____no_output_____" ], [ "phi(2)", "_____no_output_____" ], [ "import matplotlib.pyplot as plt\n%matplotlib inline\nx_cod = np.linspace(-5,5,200)\ny_cod = phi(x_cod)\nplt.plot(x_cod, y_cod)", "_____no_output_____" ], [ "x_cod", "_____no_output_____" ], [ "", "_____no_output_____" ], [ "", "_____no_output_____" ], [ "", "_____no_output_____" ], [ "", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbfc75be9fc1191f77f2e68f971f8fda68ec24e8
4,061
ipynb
Jupyter Notebook
ipynb/Zimbabwe.ipynb
RobertRosca/oscovida.github.io
d609949076e3f881e38ec674ecbf0887e9a2ec25
[ "CC-BY-4.0" ]
null
null
null
ipynb/Zimbabwe.ipynb
RobertRosca/oscovida.github.io
d609949076e3f881e38ec674ecbf0887e9a2ec25
[ "CC-BY-4.0" ]
null
null
null
ipynb/Zimbabwe.ipynb
RobertRosca/oscovida.github.io
d609949076e3f881e38ec674ecbf0887e9a2ec25
[ "CC-BY-4.0" ]
null
null
null
28.398601
162
0.508742
[ [ [ "# Zimbabwe\n\n* Homepage of project: https://oscovida.github.io\n* [Execute this Jupyter Notebook using myBinder](https://mybinder.org/v2/gh/oscovida/binder/master?filepath=ipynb/Zimbabwe.ipynb)", "_____no_output_____" ] ], [ [ "import datetime\nimport time\n\nstart = datetime.datetime.now()\nprint(f\"Notebook executed on: {start.strftime('%d/%m/%Y %H:%M:%S%Z')} {time.tzname[time.daylight]}\")", "_____no_output_____" ], [ "%config InlineBackend.figure_formats = ['svg']\nfrom oscovida import *", "_____no_output_____" ], [ "overview(\"Zimbabwe\");", "_____no_output_____" ], [ "# load the data\ncases, deaths, region_label = get_country_data(\"Zimbabwe\")\n\n# compose into one table\ntable = compose_dataframe_summary(cases, deaths)\n\n# show tables with up to 500 rows\npd.set_option(\"max_rows\", 500)\n\n# display the table\ntable", "_____no_output_____" ] ], [ [ "# Explore the data in your web browser\n\n- If you want to execute this notebook, [click here to use myBinder](https://mybinder.org/v2/gh/oscovida/binder/master?filepath=ipynb/Zimbabwe.ipynb)\n- and wait (~1 to 2 minutes)\n- Then press SHIFT+RETURN to advance code cell to code cell\n- See http://jupyter.org for more details on how to use Jupyter Notebook", "_____no_output_____" ], [ "# Acknowledgements:\n\n- Johns Hopkins University provides data for countries\n- Robert Koch Institute provides data for within Germany\n- Open source and scientific computing community for the data tools\n- Github for hosting repository and html files\n- Project Jupyter for the Notebook and binder service\n- The H2020 project Photon and Neutron Open Science Cloud ([PaNOSC](https://www.panosc.eu/))\n\n--------------------", "_____no_output_____" ] ], [ [ "print(f\"Download of data from Johns Hopkins university: cases at {fetch_cases_last_execution()} and \"\n f\"deaths at {fetch_deaths_last_execution()}.\")", "_____no_output_____" ], [ "# to force a fresh download of data, run \"clear_cache()\"", "_____no_output_____" ], [ "print(f\"Notebook execution took: {datetime.datetime.now()-start}\")\n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ] ]
cbfc8a7b05e9d6e50777d7146b33e9b74b625a76
22,548
ipynb
Jupyter Notebook
test.ipynb
volodia99/booklet
8c07a3f2538979a8e1ab5594b2915e5a75609ecf
[ "MIT" ]
null
null
null
test.ipynb
volodia99/booklet
8c07a3f2538979a8e1ab5594b2915e5a75609ecf
[ "MIT" ]
null
null
null
test.ipynb
volodia99/booklet
8c07a3f2538979a8e1ab5594b2915e5a75609ecf
[ "MIT" ]
null
null
null
247.78022
20,620
0.93179
[ [ [ "import colorblind as cb\nimport numpy as np\nimport matplotlib.pyplot as plt", "_____no_output_____" ], [ "np.arange(100)", "_____no_output_____" ], [ "cb.test_mapping('rainbow')", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code" ] ]
cbfc9e4f50d110675053c88581cbb9dee63a1e79
15,425
ipynb
Jupyter Notebook
7.16.ipynb
xpw123/mysql
a40b3b615cb2b439fb516be05523ce8e85f0ca26
[ "Apache-2.0" ]
null
null
null
7.16.ipynb
xpw123/mysql
a40b3b615cb2b439fb516be05523ce8e85f0ca26
[ "Apache-2.0" ]
null
null
null
7.16.ipynb
xpw123/mysql
a40b3b615cb2b439fb516be05523ce8e85f0ca26
[ "Apache-2.0" ]
null
null
null
17.853009
318
0.4494
[ [ [ "# 基本程序设计\n- 一切代码输入,请使用英文输入法", "_____no_output_____" ] ], [ [ "print('hello word')", "hello word\n" ], [ "print('hello word')", "hello word\n" ], [ "print 'hello'", "_____no_output_____" ] ], [ [ "## 编写一个简单的程序\n- 圆公式面积: area = radius \\* radius \\* 3.1415", "_____no_output_____" ] ], [ [ "radius = 1\narea = radius * radius * 3.1415\nprint(area)", "3.1415\n" ], [ "radius = 1.0\narea = radius * radius * 3.14 # 将后半部分的结果赋值给变量area\n# 变量一定要有初始值!!!\n# radius: 变量.area: 变量!\n# int 类型\nprint(area)", "3.14\n" ] ], [ [ "### 在Python里面不需要定义数据的类型", "_____no_output_____" ], [ "## 控制台的读取与输入\n- input 输入进去的是字符串\n- eval", "_____no_output_____" ] ], [ [ "input('请输入半径:')", "请输入半径:1\n" ], [ "name=input('请输入名字:')\nprint(name,'666666')", "请输入名字:wm\nwm 666666\n" ], [ "input('请输入半径:')", "_____no_output_____" ], [ "radius = input('请输入半径') # input得到的结果是字符串类型\nradius = float(radius)\narea = radius * radius * 3.14\nprint('面积为:',area)", "_____no_output_____" ], [ "radius = eval(input('请输入半径'))\nprint(radius)\nprint(type(radius))\narea = radius * radius * 3.1415\nprint(area)", "请输入半径2\n2\n<class 'int'>\n12.566\n" ], [ "gao = eval(input('请输入高'))\ndi = eval(input ('请输入底'))\narea = gao*di*0.5\nprint(area)", "请输入高5\n请输入底6\n15.0\n" ], [ "import random\nnumber = random.randint(0,10)\nprint(number)\nif (number % 2) == 0:\n print('中奖')\nelse:\n print('谢谢惠顾')\n ", "2\n中奖\n" ] ], [ [ "- 在jupyter用shift + tab 键可以跳出解释文档", "_____no_output_____" ], [ "## 变量命名的规范\n- 由字母、数字、下划线构成\n- 不能以数字开头 \\*\n- 标识符不能是关键词(实际上是可以强制改变的,但是对于代码规范而言是极其不适合)\n- 可以是任意长度\n- 驼峰式命名", "_____no_output_____" ], [ "## 变量、赋值语句和赋值表达式\n- 变量: 通俗理解为可以变化的量\n- x = 2 \\* x + 1 在数学中是一个方程,而在语言中它是一个表达式\n- test = test + 1 \\* 变量在赋值之前必须有值", "_____no_output_____" ], [ "## 同时赋值\nvar1, var2,var3... = exp1,exp2,exp3...", "_____no_output_____" ], [ "## 定义常量\n- 常量:表示一种定值标识符,适合于多次使用的场景。比如PI\n- 注意:在其他低级语言中如果定义了常量,那么,该常量是不可以被改变的,但是在Python中一切皆对象,常量也是可以被改变的", "_____no_output_____" ], [ "## 数值数据类型和运算符\n- 在Python中有两种数值类型(int 和 float)适用于加减乘除、模、幂次\n<img src = \"../Photo/01.jpg\"></img>", "_____no_output_____" ], [ "## 运算符 /、//、**", "_____no_output_____" ] ], [ [ "input()", "_____no_output_____" ] ], [ [ "## 运算符 %", "_____no_output_____" ] ], [ [ "25/4", "_____no_output_____" ], [ "25 //4", "_____no_output_____" ], [ "number=int(input('输入一个数'))\nif number%2==0:\n print('偶数')\nelse:\n print('奇数')", "_____no_output_____" ], [ "seconds = eval(input('seconds:>>'))\nmins = seconds // 60\nseconds = seconds % 60\nprint(mins,\"分\",seconds,\"秒\")", "seconds:>>500\n8 分 20 秒\n" ], [ "week = eval(input('week:'))\nplus_day = eval(input('days:'))\nres = (week + plus_day) % 7\nprint(res)", "week:12\ndays:12\n3\n" ] ], [ [ "## 科学计数法\n- 1.234e+2\n- 1.234e-2", "_____no_output_____" ] ], [ [ "import numpy as up", "_____no_output_____" ], [ "(3+4*x)/5 - (10*(y-5)*(a+b+c)/x) + 9*((4/x)+(9+x)/y)\n# graph\npart_1=(3+4*x)/5\nprat_2=(10*(y-5)*(a+b+c)/x)\nprat_3=9*((4/x)+(9+x)/y)", "_____no_output_____" ] ], [ [ "## 计算表达式和运算优先级\n<img src = \"../Photo/02.png\"></img>\n<img src = \"../Photo/03.png\"></img>", "_____no_output_____" ], [ "## 增强型赋值运算\n<img src = \"../Photo/04.png\"></img>", "_____no_output_____" ], [ "## 类型转换\n- float -> int\n- 四舍五入 round", "_____no_output_____" ] ], [ [ "a = 1\nfloat(a) # 强制转换", "_____no_output_____" ], [ "round(1.498895,3) #逗号后面的数字就是保留的位数", "_____no_output_____" ] ], [ [ "## EP:\n- 如果一个年营业税为0.06%,那么对于197.55e+2的年收入,需要交税为多少?(结果保留2为小数)\n- 必须使用科学计数法", "_____no_output_____" ] ], [ [ "round((197.55e+2)*0.06/100,2)", "_____no_output_____" ], [ "#输入月供,输出总还款数\n贷款数 = eval(input('输入贷款数'))\n月利率 = 
0.01\n年限= 5\n月供 = (贷款数 * 月利率)/ (1-1(1+月利率)**(年限 * 12))", "_____no_output_____" ] ], [ [ "# Project\n- 用Python写一个贷款计算器程序:输入的是月供(monthlyPayment) 输出的是总还款数(totalpayment)\n![](../Photo/05.png)", "_____no_output_____" ], [ "# Homework\n- 1\n<img src=\"../Photo/06.png\"></img>", "_____no_output_____" ] ], [ [ "import math\na=float(input('输入摄氏度'))\nb=a*9/5+32", "输入摄氏度6\n" ] ], [ [ "- 2\n<img src=\"../Photo/07.png\"></img>", "_____no_output_____" ] ], [ [ "import math\nradius= eval(input('输入半径:'))\narea=radius * radius * math.pi\nprint('底面积:',area)\nlength=eval(input('输入高:'))\nvolume=area * length\nprint('体积为:',volume)", "输入半径:6\n底面积: 113.09733552923255\n输入高:5\n体积为: 565.4866776461628\n" ] ], [ [ "- 3\n<img src=\"../Photo/08.png\"></img>", "_____no_output_____" ] ], [ [ "feet=eval(input('输入英尺'))\nm=feet*0.305\nprint('%0.1f英尺为%0.1f米' %(feet,m))", "输入英尺6\n6.0英尺为1.8米\n" ] ], [ [ "- 6\n<img src=\"../Photo/12.png\"></img>", "_____no_output_____" ] ], [ [ "v0=eval(input('输入初始速度:'))\nv1=eval(input('输入末速度:'))\nt=eval(input('输入时间:'))\na=(v1-v0)/t\nprint('加速度;',a)", "输入初始速度:5\n输入末速度:5\n输入时间:5\n加速度; 0.0\n" ], [ "m=eval(input('输入水量'))\ninitialtemperature=eval(input('输入初始温度:'))\nfinaltemperature=eval(input('输入最终温度:'))\nq=m*(finaltemperature - initialtemperature) * 4184\nprint('能量为:',q)", "输入水量2\n输入初始温度:2\n输入最终温度:2\n能量为: 0\n" ] ], [ [ "- 7 进阶\n<img src=\"../Photo/13.png\"></img>", "_____no_output_____" ] ], [ [ "money=eval(input('存到银行金额:'))\nyuelilv=1+(0.05/12)\nmoney1=money*yuelilv\nmoney2=(money1+money)*yuelilv\nmoney3=(money2+money)*yuelilv\nmoney4=(money3+money)*yuelilv\nmoney5=(money4+money)*yuelilv\nmoney6=(money5+money)*yuelilv\nprint(round(money6,2))", "存到银行金额:6\n36.53\n" ], [ "balance=eval(input('输入差额'))\ninterestrate=eval(input('输入年利率'))\nlixi=balance * (interestrate/1200)\nprint('利息为:',lixi)", "输入差额5\n输入年利率5\n利息为: 0.020833333333333332\n" ] ], [ [ "- 8 进阶\n<img src=\"../Photo/14.png\"></img>", "_____no_output_____" ] ], [ [ "number=eval(input('输入0-1000之间的整数:'))\nsum=number%10+(number%100)//10+(number%1000)//100\nprint(sum)", "输入0-1000之间的整数:55\n10\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ] ]
cbfca47e8a3ec91967a059368567b6de4d7e3678
3,775
ipynb
Jupyter Notebook
colab/Symbolic_Music_Generation_with_Diffusion_Models.ipynb
voodoohop/symbolic-music-diffusion
582ddcc882f0c7b112cb5a8cf54003d107a940e1
[ "Apache-2.0" ]
null
null
null
colab/Symbolic_Music_Generation_with_Diffusion_Models.ipynb
voodoohop/symbolic-music-diffusion
582ddcc882f0c7b112cb5a8cf54003d107a940e1
[ "Apache-2.0" ]
null
null
null
colab/Symbolic_Music_Generation_with_Diffusion_Models.ipynb
voodoohop/symbolic-music-diffusion
582ddcc882f0c7b112cb5a8cf54003d107a940e1
[ "Apache-2.0" ]
null
null
null
27.355072
284
0.516556
[ [ [ "<a href=\"https://colab.research.google.com/github/voodoohop/symbolic-music-diffusion/blob/main/colab/Symbolic_Music_Generation_with_Diffusion_Models.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ] ], [ [ "!git clone https://github.com/voodoohop/symbolic-music-diffusion\n%cd symbolic-music-diffusion\n", "_____no_output_____" ], [ "#@title Requirements\n!pip install -r requirements.txt\n!sudo apt-get install build-essential libasound2-dev libjack-dev portaudio19-dev\n!pip install magenta", "_____no_output_____" ], [ "#@title Get Lakh MIDI Dataset\n%cd /content\n!wget http://hog.ee.columbia.edu/craffel/lmd/lmd_full.tar.gz\n!tar -xzvf lmd_full.tar.gz\n%cd -", "_____no_output_____" ], [ "#@title MIDI -> TFRecord\n!mkdir -p /content/notesequences\n\n# TFRecord file that will contain NoteSequence protocol buffers.\n\n!convert_dir_to_note_sequences \\\n --input_dir=/content/lmd_full \\\n --output_file=/content/notesequences/notesequences.tfrecord \\\n --recursive", "_____no_output_____" ], [ "#@title MusicVAE Checkpoint (2 bar melody)\n!wget https://storage.googleapis.com/magentadata/models/music_vae/checkpoints/cat-mel_2bar_big.tar\n!tar -xvf cat-mel_2bar_big.tar\n!mkdir -p /content/musicvae\n!mv -v cat-mel_2bar_big.ckpt.* /content/musicvae", "_____no_output_____" ], [ "#@title Generate Song Data\n!mkdir -p /content/songdata\n!python -m scripts.generate_song_data_beam \\\n --checkpoint=/content/musicvae \\\n --input=/content/notesequences/notesequences.tfrecord \\\n --output=/content/songdata/", "_____no_output_____" ], [ "", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ] ]
cbfcb8cc4772099698b42cbc441e69f5858be957
953,219
ipynb
Jupyter Notebook
02CNN/01ImagePreprocess_sol.ipynb
aonekoda/pytorch
1433ec75404bf0823e1499ba0a24293b6ff5ab37
[ "MIT" ]
null
null
null
02CNN/01ImagePreprocess_sol.ipynb
aonekoda/pytorch
1433ec75404bf0823e1499ba0a24293b6ff5ab37
[ "MIT" ]
null
null
null
02CNN/01ImagePreprocess_sol.ipynb
aonekoda/pytorch
1433ec75404bf0823e1499ba0a24293b6ff5ab37
[ "MIT" ]
null
null
null
3,491.644689
579,116
0.961765
[ [ [ "# Loading Image Data\n\n강아지와 고양이를 구분하는 이미지 분류기를 생성하기 위해서는 고양이와 강아지 사진을 모아야 한다. 임의로 수집된 다음과 같은 고양이/강아지 사진을 사용하자.\n![img](../assets/dog_cat.png)\n이 사진을 사용하여 CNN으로 이미지 분류기를 만들기 위해서는 해당 사진을 적절히 전처리하여야 한다.\n", "_____no_output_____" ] ], [ [ "%matplotlib inline\n%config InlineBackend.figure_format = 'retina'\n\nimport matplotlib.pyplot as plt\n\nimport torch\nfrom torchvision import datasets, transforms\n", "_____no_output_____" ] ], [ [ "이미지를 전처리하기 위해 가장 쉽고 편한 방법은 `torchvision`패키지의 `datasets.ImageFolder` 을 사용하는 것이다. ([documentation](http://pytorch.org/docs/master/torchvision/datasets.html#imagefolder)). 일반적으로 `ImageFolder` 의 사용법은 다음과 같다.:\n\n```python\ndataset = datasets.ImageFolder('path/to/data', transform=transform)\n```\n\n`'path/to/data'` 은 이미지가 있는 디렉토리이다. `transform`은 이미지를 전처리하기 위한 방법이다. [`transforms`](http://pytorch.org/docs/master/torchvision/transforms.html) module은 `torchvision` 패키지의 서브모듈로 다양한 이미지 전처리 메소드를 제공한다. \nImageFolder는 다음과 같은 구조로 구성되어 있어야 한다.:\n```\nroot/dog/xxx.png\nroot/dog/xxy.png\nroot/dog/xxz.png\n\nroot/cat/123.png\nroot/cat/nsdf3.png\nroot/cat/asd932_.png\n```\n\n각각의 클래스의 이름으로 된 디렉토리가 있어야 한다. (예를 들면 `cat`, `dog`). 각 이미지의 label은 디렉토리의 이름과 같게 된다. 제공되는 Cat_Dog_data.zip 파일은 미리 train과 test로 나뉘어 있다.\n\n### Transforms\n\n`ImageFolder`로 사진 이미지를 읽어 들일 때 , 이미지 데이터를 신경망에서 처리할 수 있도록 적절하게 전처리 해야 한다. 일단 제각각인 사진의 크기를 같은 사이즈가 되도록 해야한다. \n- `transforms.Resize()` \n- `transforms.CenterCrop()`\n- `transforms.RandomResizedCrop()` 등\n\n`transforms.ToTensor()`로 이미지를 반드시 PyTorch tensors 로 변환해야 한다. 여러가지 변환은 `transforms.Compose()`로 묶어서 처리가 가능하다. \n\n해당 전처리는 순서대로 수행된다.:\n\n```python\ntransform = transforms.Compose([transforms.Resize(255),\n transforms.CenterCrop(224),\n transforms.ToTensor()])\n\n```\n\n참고) [documentation](http://pytorch.org/docs/master/torchvision/transforms.html). \n\n### Data Loaders\n\n`ImageFolder` 는 이미지를 전처리하여 데이터 셋으로 만든다. 이렇게 만들어진 이미지 데이터 셋을 [`DataLoader`](http://pytorch.org/docs/master/data.html#torch.utils.data.DataLoader)로 읽어들인다. `DataLoader`로 이미지와 이미지의 label을 읽어 들일 수 있다. shuffle하면 각 epoch 에서 데이터를 읽어 들이기 전에 이미지 데이터를 섞어준다.\n\n```python\ndataloader = torch.utils.data.DataLoader(dataset, batch_size=32, shuffle=True)\n```\n\n`dataloader` 는 iterator로 `next()`메소드로 for loop를 통해 반복적으로 읽어들인다.\n\n```python\n# Looping through it, get a batch on each loop \nfor images, labels in dataloader:\n pass\n\n# Get one batch\nimages, labels = next(iter(dataloader))\n```\n \n>**실습 :** `ImageFolder`로 `Cat_Dog_data/train` 폴더에서 이미지를 읽어 들여보시오. transforms을 정의하고 dataloader로 생성하시오.", "_____no_output_____" ] ], [ [ "data_dir = 'Cat_Dog_data/train'\n\ntransform = transforms.Compose([transforms.Resize(255),\n transforms.CenterCrop(224),\n transforms.ToTensor()])\ndataset = datasets.ImageFolder(data_dir, transform=transform)\ndataloader = torch.utils.data.DataLoader(dataset, batch_size=10, shuffle=True)", "_____no_output_____" ], [ "# Run this to test your data loader\nimages, labels = next(iter(dataloader))\nimage = images[0].numpy().transpose((1, 2, 0))\nplt.imshow(image)", "_____no_output_____" ] ], [ [ "## Data Augmentation\n\n이미지를 임의로 회전, 반전, 스케일 변환, crop등을 통해 다양하게 변환시킨다. 이렇게 이미지를 임의로 변형해서 신경망을 훈련하면 이미지 분류의 성능을 더 향상시킬수 있다. \n\n다음과 같이 transform을 수행할 수 있다.:\n\n```python\ntrain_transforms = transforms.Compose([transforms.RandomRotation(30),\n transforms.RandomResizedCrop(224),\n transforms.RandomHorizontalFlip(),\n transforms.ToTensor(),\n transforms.Normalize([0.5, 0.5, 0.5], \n [0.5, 0.5, 0.5])])\n```\n\n `transforms.Normalize`로 이미지를 normalize 할 수 있다. means 과 standard deviations을 지정한다. 
\n \n```input[channel] = (input[channel] - mean[channel]) / std[channel]```\n\nNormalizing 을 하면 신경망의 학습이 더 잘 수행된다. \n\n\n>**실습 :** train data와 test data에 대해 transforms를 정의한다 (normalization 은 일단 제외).", "_____no_output_____" ] ], [ [ "data_dir = 'Cat_Dog_data'\n\n# TODO: Define transforms for the training data and testing data\ntrain_transforms = transforms.Compose([transforms.RandomRotation(30),\n transforms.CenterCrop(224),\n transforms.RandomHorizontalFlip(),\n transforms.ToTensor()]) \n\ntest_transforms = transforms.Compose([transforms.Resize(255),\n transforms.CenterCrop(224),\n transforms.ToTensor()])\n\n\n# Pass transforms in here, then run the next cell to see how the transforms look\ntrain_data = datasets.ImageFolder(data_dir + '/train', transform=train_transforms)\ntest_data = datasets.ImageFolder(data_dir + '/test', transform=test_transforms)\n\ntrainloader = torch.utils.data.DataLoader(train_data, batch_size=10, shuffle=True)\ntestloader = torch.utils.data.DataLoader(test_data, batch_size=10, shuffle=True)", "_____no_output_____" ], [ "# change this to the trainloader or testloader \nclass_name=['Cat', 'Dog']\ndata_iter = iter(trainloader)\n\nimages, labels = next(data_iter)\n\nfig, axes = plt.subplots(figsize=(10,4), ncols=4)\n\nfor ii in range(4):\n ax = axes[ii]\n image = images[ii].numpy().transpose((1,2,0))\n ax.set_title(class_name[labels[ii].numpy()])\n ax.imshow(image)", "_____no_output_____" ] ], [ [ "transform된 이미지를 확인해 보자.\n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ] ]
cbfcbe3f506952b52a6b457be4dace7fc8ee2dc2
18,263
ipynb
Jupyter Notebook
__site/generated/notebooks/EX-powergen.ipynb
giordano/DataScienceTutorials.jl
8284298842e0d77061cf8ee767d0899fb7d051ff
[ "MIT" ]
null
null
null
__site/generated/notebooks/EX-powergen.ipynb
giordano/DataScienceTutorials.jl
8284298842e0d77061cf8ee767d0899fb7d051ff
[ "MIT" ]
null
null
null
__site/generated/notebooks/EX-powergen.ipynb
giordano/DataScienceTutorials.jl
8284298842e0d77061cf8ee767d0899fb7d051ff
[ "MIT" ]
null
null
null
27.136701
296
0.582763
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
cbfcc7bec5d87ed0e08b8cae04fdbaa39e1c92ed
940,854
ipynb
Jupyter Notebook
Beta Calculation/.ipynb_checkpoints/Bayesian Beta Computation with Wishart-checkpoint.ipynb
Jonathan-Lindbloom/bayesian-projects
5f67b6dcf0826b65943f26352d142f3139c9ffe8
[ "MIT" ]
null
null
null
Beta Calculation/.ipynb_checkpoints/Bayesian Beta Computation with Wishart-checkpoint.ipynb
Jonathan-Lindbloom/bayesian-projects
5f67b6dcf0826b65943f26352d142f3139c9ffe8
[ "MIT" ]
null
null
null
Beta Calculation/.ipynb_checkpoints/Bayesian Beta Computation with Wishart-checkpoint.ipynb
Jonathan-Lindbloom/bayesian-projects
5f67b6dcf0826b65943f26352d142f3139c9ffe8
[ "MIT" ]
null
null
null
761.208738
414,812
0.943486
[ [ [ "import numpy as np\nimport matplotlib.pyplot as plt\nimport pandas as pd\nimport pymc3 as pm\nfrom theano import tensor as T\nimport arviz\n\nimport os\nimport sys\n\nfrom jupyterthemes import jtplot\njtplot.style(theme=\"monokai\")", "_____no_output_____" ], [ "os.listdir()", "_____no_output_____" ], [ "lng = pd.read_csv(\"LNG.csv\", index_col=\"Date\")[[\"Adj Close\"]]\ndji = pd.read_csv(\"^DJI.csv\", index_col=\"Date\")[[\"Adj Close\"]]\nlng = lng.rename(columns={\"Adj Close\":\"LNG close\"})\ndji = dji.rename(columns={\"Adj Close\":\"DJI close\"})\nlng[\"LNG log return\"] = np.log(lng[\"LNG close\"]) - np.log(lng[\"LNG close\"].shift(1))\ndji[\"DJI log return\"] = np.log(dji[\"DJI close\"]) - np.log(dji[\"DJI close\"].shift(1))\nlng = lng.dropna()\ndji = dji.dropna()\ndf = pd.merge(lng, dji, left_index=True, right_index=True)\ndf.head()", "_____no_output_____" ], [ "plt.figure(figsize=(15,10))\nplt.scatter(df[\"DJI log return\"], df[\"LNG log return\"], alpha=0.5)\nplt.title(\"Log LNG returns vs. log DJI returns\")\nplt.xlabel(\"DJI log return\")\nplt.ylabel(\"LNG log return\")\nplt.grid()\nplt.show()", "_____no_output_____" ], [ "stacked = np.vstack([df[\"DJI log return\"].values, df[\"LNG log return\"].values])\nsample_cov = np.cov(stacked)[0][1]\nmkt_port_var = np.var(df[\"DJI log return\"].values)\nsample_beta = sample_cov/mkt_port_var\nprint(\"The sample estimate of beta is {}\".format(sample_beta))", "The sample estimate of beta is 0.9564616215625722\n" ] ], [ [ "# MCMC beta estimation, using the multivariate normal model for log returns", "_____no_output_____" ] ], [ [ "data = np.column_stack((df[\"DJI log return\"].values, df[\"LNG log return\"]))\n\nnum_samps = 50000\n\nwith pm.Model() as model:\n '''\n The code for this model is adapted from Austin Rochford's blog post, available here: https://austinrochford.com/posts/2015-09-16-mvn-pymc3-lkj.html\n '''\n sigma = pm.Lognormal('sigma', np.zeros(2), np.ones(2), shape=2)\n nu = pm.Uniform(\"nu\", 0, 5)\n C_triu = pm.LKJCorr('C_triu', nu, 2) \n \n C = pm.Deterministic('C', T.fill_diagonal(C_triu[np.zeros((2, 2), dtype=np.int64)], 1.))\n sigma_diag = pm.Deterministic('sigma_mat', T.nlinalg.diag(sigma))\n cov = pm.Deterministic('cov', T.nlinalg.matrix_dot(sigma_diag, C, sigma_diag))\n tau = pm.Deterministic('tau', T.nlinalg.matrix_inverse(cov))\n \n mu = pm.MvNormal('mu', 0, tau, shape=2)\n x_ = pm.MvNormal('x', mu, tau, observed=data)\n\n step = pm.Metropolis()\n trace_ = pm.sample(num_samps, step)", "Multiprocess sampling (4 chains in 4 jobs)\nCompoundStep\n>Metropolis: [mu]\n>Metropolis: [C_triu]\n>Metropolis: [nu]\n>Metropolis: [sigma]\nSampling 4 chains, 0 divergences: 100%|██████████| 202000/202000 [02:02<00:00, 1648.11draws/s]\nThe number of effective samples is smaller than 10% for some parameters.\n" ], [ "nburn = 5000\ntrace = trace_[nburn:]", "_____no_output_____" ], [ "pm.traceplot(trace)", "C:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in 
matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n 
warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not 
effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n 
warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\n" ], [ "# Compute matrix inverse directly\na11 = trace[\"cov\"][:, 0, 0]\na12 = trace[\"cov\"][:, 0, 1]\na21 = trace[\"cov\"][:, 1, 0]\na22 = 
trace[\"cov\"][:, 1, 1]\ntemp_matrices = np.array([[a22, -a12],[-a21, a11]])\nprefactor = 1.0/(a11*a22 - a12*a21)\ninv_matrices = prefactor*temp_matrices\nmkt_vars = inv_matrices[0,0,:]\ndji_lng_covs = inv_matrices[0,1,:]\nbetas = dji_lng_covs/mkt_vars", "_____no_output_____" ], [ "plt.figure(figsize=(15,10))\nplt.hist(betas, bins=50)\nplt.title(\"Posterior Samples of the Beta of LNG, using Multivariate Gaussian Model\")\nplt.savefig(\"lng_beta_normal.png\")\nplt.show()", "_____no_output_____" ], [ "print(np.array(betas).std())", "0.06599460790028769\n" ], [ "print(np.array(betas).mean())", "0.9496147678698746\n" ] ], [ [ "# MCMC beta estimation, using the multivariate student-t model for log returns", "_____no_output_____" ] ], [ [ "data = np.column_stack((df[\"DJI log return\"].values, df[\"LNG log return\"]))\n\nnum_samps = 50000\n\nwith pm.Model() as model:\n '''\n The code for this model is adapted from Austin Rochford's blog post, available here: https://austinrochford.com/posts/2015-09-16-mvn-pymc3-lkj.html\n '''\n sigma = pm.Lognormal('sigma', np.zeros(2), np.ones(2), shape=2)\n nu = pm.Uniform(\"nu\", 0, 5)\n C_triu = pm.LKJCorr('C_triu', nu, 2) \n \n C = pm.Deterministic('C', T.fill_diagonal(C_triu[np.zeros((2, 2), dtype=np.int64)], 1.))\n sigma_diag = pm.Deterministic('sigma_mat', T.nlinalg.diag(sigma))\n cov = pm.Deterministic('cov', T.nlinalg.matrix_dot(sigma_diag, C, sigma_diag))\n tau = pm.Deterministic('tau', T.nlinalg.matrix_inverse(cov))\n \n nu2 = pm.HalfNormal(\"nu2\", sigma=1)\n nu3 = 2.01 + nu2 # We assume support is roughly > 2\n mu = pm.MvStudentT('mu', nu=nu3, Sigma=tau, mu=0, shape=2)\n x_ = pm.MvStudentT('x', nu=nu3, Sigma=tau, mu=mu, observed=data)\n\n step = pm.Metropolis()\n trace_ = pm.sample(num_samps, step)", "Multiprocess sampling (4 chains in 4 jobs)\nCompoundStep\n>Metropolis: [mu]\n>Metropolis: [nu2]\n>Metropolis: [C_triu]\n>Metropolis: [nu]\n>Metropolis: [sigma]\nSampling 4 chains, 0 divergences: 100%|██████████| 202000/202000 [02:22<00:00, 1416.78draws/s]\nThe number of effective samples is smaller than 10% for some parameters.\n" ], [ "nburn = 5000\ntrace2 = trace_[nburn:]", "_____no_output_____" ], [ "pm.traceplot(trace2)", "C:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n 
warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not 
effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n 
warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not 
effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n 
warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\nC:\\Users\\Jonathan\\anaconda3\\lib\\site-packages\\arviz\\plots\\backends\\matplotlib\\distplot.py:36: UserWarning: Argument backend_kwargs has not effect in matplotlib.plot_distSupplied value won't be used\n warnings.warn(\n" ], [ "# Compute matrix inverse directly\na11 = trace2[\"cov\"][:, 0, 0]\na12 = trace2[\"cov\"][:, 0, 1]\na21 = trace2[\"cov\"][:, 1, 0]\na22 = trace2[\"cov\"][:, 1, 1]\ntemp_matrices = np.array([[a22, -a12],[-a21, a11]])\nprefactor = 1.0/(a11*a22 - a12*a21)\ninv_matrices = prefactor*temp_matrices\nmkt_vars = inv_matrices[0,0,:]\ndji_lng_covs = inv_matrices[0,1,:]\nbetas = dji_lng_covs/mkt_vars", "_____no_output_____" ], [ "plt.figure(figsize=(15,10))\nplt.hist(betas, bins=50)\nplt.title(\"Posterior Samples of the Beta of LNG, using Multivariate Student T model\")\nplt.savefig(\"lng_beta_student_t.png\")\nplt.show()", "_____no_output_____" ], [ "print(np.array(betas).std())", "0.0898662682575286\n" ], [ "print(np.array(betas).mean())", "0.9409744403093506\n" ] ] ]
[ "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ] ]
cbfccccd34a2faad6138a0a8efde7d79e120c534
6,025
ipynb
Jupyter Notebook
jupyter/load_mxnet_model.ipynb
mothguib/djl
8c1f5cea54113c857fc2b64d2a56de1104b7b794
[ "Apache-2.0" ]
1
2020-02-11T03:38:38.000Z
2020-02-11T03:38:38.000Z
jupyter/load_mxnet_model.ipynb
mothguib/djl
8c1f5cea54113c857fc2b64d2a56de1104b7b794
[ "Apache-2.0" ]
null
null
null
jupyter/load_mxnet_model.ipynb
mothguib/djl
8c1f5cea54113c857fc2b64d2a56de1104b7b794
[ "Apache-2.0" ]
null
null
null
28.966346
216
0.593693
[ [ [ "# Load MXNet model\n\nIn this tutorial, you learn how to load an existing MXNet model and use it to run a prediction task.\n\n\n## Preparation\n\nThis tutorial requires the installation of Java Kernel. For more information on installing the Java Kernel, see the [README](https://github.com/awslabs/djl/blob/master/jupyter/README.md) to install Java Kernel.", "_____no_output_____" ] ], [ [ "%mavenRepo snapshots https://oss.sonatype.org/content/repositories/snapshots/\n\n%maven ai.djl:api:0.3.0-SNAPSHOT\n%maven ai.djl:repository:0.3.0-SNAPSHOT\n%maven ai.djl:model-zoo:0.3.0-SNAPSHOT\n%maven ai.djl.mxnet:mxnet-engine:0.3.0-SNAPSHOT\n%maven ai.djl.mxnet:mxnet-model-zoo:0.3.0-SNAPSHOT\n%maven org.slf4j:slf4j-api:1.7.26\n%maven org.slf4j:slf4j-simple:1.7.26\n%maven net.java.dev.jna:jna:5.3.0\n \n// See https://github.com/awslabs/djl/blob/master/mxnet/mxnet-engine/README.md\n// for more MXNet library selection options\n%maven ai.djl.mxnet:mxnet-native-auto:1.6.0-SNAPSHOT", "_____no_output_____" ], [ "import java.awt.image.*;\nimport java.nio.file.*;\nimport java.util.*;\nimport java.util.stream.*;\nimport ai.djl.*;\nimport ai.djl.inference.*;\nimport ai.djl.ndarray.*;\nimport ai.djl.ndarray.index.*;\nimport ai.djl.modality.*;\nimport ai.djl.modality.cv.*;\nimport ai.djl.modality.cv.util.*;\nimport ai.djl.modality.cv.transform.*;\nimport ai.djl.mxnet.zoo.*;\nimport ai.djl.translate.*;\nimport ai.djl.training.util.*;\nimport ai.djl.util.*;\nimport ai.djl.basicmodelzoo.cv.classification.*;", "_____no_output_____" ] ], [ [ "## Step 1: Prepare your MXNet model\n\nThis tutorial assumes that you have a MXNet model trained using Python. A MXNet symbolic model usually contains the following files:\n* Symbol file: {MODEL_NAME}-symbol.json - a json file that contains network information about the model\n* Parameters file: {MODEL_NAME}-{EPOCH}.params - a binary file that stores the parameter weight and bias\n* Synset file: synset.txt - an optional text file that stores classification classes labels\n\nThis tutorial uses a pre-trained MXNet `resnet18_v1` model.", "_____no_output_____" ], [ "We use [DownloadUtils.java] for downloading files from internet.", "_____no_output_____" ] ], [ [ "%load DownloadUtils.java", "_____no_output_____" ], [ "DownloadUtils.download(\"https://mlrepo.djl.ai/model/cv/image_classification/ai/djl/mxnet/resnet/0.0.1/resnet18_v1-symbol.json\", \"build/resnet/resnet18_v1-symbol.json\", new ProgressBar());\nDownloadUtils.download(\"https://mlrepo.djl.ai/model/cv/image_classification/ai/djl/mxnet/resnet/0.0.1/resnet18_v1-0000.params.gz\", \"build/resnet/resnet18_v1-0000.params\", new ProgressBar());\nDownloadUtils.download(\"https://mlrepo.djl.ai/model/cv/image_classification/ai/djl/mxnet/synset.txt\", \"build/resnet/synset.txt\", new ProgressBar());\n", "_____no_output_____" ] ], [ [ "## Step 2: Load your model", "_____no_output_____" ] ], [ [ "Path modelDir = Paths.get(\"build/resnet\");\nModel model = Model.newInstance();\nmodel.load(modelDir, \"resnet18_v1\");", "_____no_output_____" ] ], [ [ "## Step 3: Create a `Translator`", "_____no_output_____" ] ], [ [ "Pipeline pipeline = new Pipeline();\npipeline.add(new CenterCrop()).add(new Resize(224, 224)).add(new ToTensor());\nTranslator<BufferedImage, Classifications> translator = ImageClassificationTranslator.builder()\n .setPipeline(pipeline)\n .setSynsetArtifactName(\"synset.txt\")\n .build();", "_____no_output_____" ] ], [ [ "## Step 4: Load image for classification", "_____no_output_____" ] ], [ [ "var img = 
BufferedImageUtils.fromUrl(\"https://djl-ai.s3.amazonaws.com/resources/images/kitten.jpg\");\nimg", "_____no_output_____" ] ], [ [ "## Step 5: Run inference", "_____no_output_____" ] ], [ [ "Predictor<BufferedImage, Classifications> predictor = model.newPredictor(translator);\nClassifications classifications = predictor.predict(img);\n\nclassifications", "_____no_output_____" ] ], [ [ "## Summary\n\nNow, you can load any MXNet symbolic model and run inference.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
cbfce478c64f33522baaee94f403357ab594ea67
41,651
ipynb
Jupyter Notebook
notebooks/Data_Prep/Data Prep - Export Raster to Points_Regular.ipynb
worldbank/Khyber-Pakhtunkhwa-Accessibility-Analysis
ac916514b1806c383cfe26f4622b000f01cffe59
[ "CC0-1.0" ]
null
null
null
notebooks/Data_Prep/Data Prep - Export Raster to Points_Regular.ipynb
worldbank/Khyber-Pakhtunkhwa-Accessibility-Analysis
ac916514b1806c383cfe26f4622b000f01cffe59
[ "CC0-1.0" ]
null
null
null
notebooks/Data_Prep/Data Prep - Export Raster to Points_Regular.ipynb
worldbank/Khyber-Pakhtunkhwa-Accessibility-Analysis
ac916514b1806c383cfe26f4622b000f01cffe59
[ "CC0-1.0" ]
null
null
null
42.029263
1,520
0.475019
[ [ [ "## Aligning rasters: A step-by-step breakdown", "_____no_output_____" ], [ "This notebook aligns input rasters with a base reference raster. The implict purpose, reflected in the datasets used here, is to align rasters so that raster math operations can be performed between the rasters", "_____no_output_____" ] ], [ [ "import os, sys\nimport re\nimport pprint\n# from pprint import pprint\n\nimport numpy as np\n\nimport rasterio\nfrom rasterio import features, transform\nfrom rasterio.mask import mask\nfrom rasterio.transform import Affine\nfrom rasterio.warp import calculate_default_transform, reproject, Resampling\n\nimport pandas as pd\nimport geopandas as gpd\n\nimport shapely\nfrom shapely.geometry import shape, box, Polygon", "_____no_output_____" ] ], [ [ "### Setup", "_____no_output_____" ], [ "Directories", "_____no_output_____" ] ], [ [ "geo_dir = r'P:\\PAK\\GEO'\ndata_dir = r'../../data'\n\nrast_dir = r'rast_inputs'\nvect_in_dir = r'vect_inputs'\nvect_out_dir = r'vect_out'\n\nrds_dir = r'roads'\ndest_dir = r'destinations'\nspeed_dir = r'speed'\nfric_dir = r'friction'\nacc_dir = r'access'", "_____no_output_____" ] ], [ [ "Projections", "_____no_output_____" ] ], [ [ "dest_crs = 'EPSG:32642'\ndcrs_int = int(re.findall('[0-9]+',dest_crs)[0])", "_____no_output_____" ] ], [ [ "### Load and process raster to points", "_____no_output_____" ], [ "Load in the base raster we are using as a template so we can match up exactly to its grid and cell size", "_____no_output_____" ] ], [ [ "rast_pth = os.path.join(geo_dir,r'Population/HRSL/kp_general_v15.tif')", "_____no_output_____" ], [ "import rasterio\nfrom rasterio import features", "_____no_output_____" ], [ "with rasterio.open(rast_pth, 'r') as src1:\n \n rast = src1.read(1).astype(np.float32)\n \n # populate geoms list\n\n results = (\n {'properties': {'POP': v}, 'geometry': s}\n for i, (s, v) \n in enumerate(\n rasterio.features.shapes(rast, transform=src1.transform)))\n\n geoms = list(results)\n\n # convert to GDF, clean up, and dissolve\n\n poly = gpd.GeoDataFrame.from_features(geoms)\n pts = poly.copy()\n pts.geometry = pts.geometry.centroid\n ", "_____no_output_____" ], [ "pts.dtypes", "_____no_output_____" ] ], [ [ "#### Set up dask cluster (if this is a lot points)", "_____no_output_____" ] ], [ [ "\nimport dask\nimport coiled\nfrom dask.distributed import Client, LocalCluster, Lock\nfrom dask.utils import SerializableLock\nimport dask.dataframe as dd\n\nfrom dask_control import *", "_____no_output_____" ], [ "client=get_dask_client(cluster_type='local',n_workers=2,processes=True,threads_per_worker=4)\nclient", "_____no_output_____" ] ], [ [ "#### Load in points data", "_____no_output_____" ] ], [ [ "# Load in points if needing to redo for some reason\npts = dd.read_csv(os.path.join(geo_dir,'Population/HRSL/pak_general_v15_pts.csv'),\n na_values = ' ',\n blocksize='100mb'\n )", "_____no_output_____" ], [ "# pts = gpd.GeoDataFrame(pts, geometry = gpd.points_from_xy(x=pts.lon_4326,y=pts.lat_4326)).set_crs(\"EPSG:4326\")", "_____no_output_____" ] ], [ [ "#### Clip to desired extent", "_____no_output_____" ], [ "Load in KP as clipping object", "_____no_output_____" ] ], [ [ "kp = gpd.read_file(os.path.join(geo_dir,'Boundaries/OCHA/pak_admbnda_adm1_ocha_pco_gaul_20181218.shp'))\nkp = kp[kp['ADM1_EN'] == 'Khyber Pakhtunkhwa']\nkp = kp.to_crs(dest_crs)\n\n# Buffer the polygon by 20km so we take in nearby markets and roads that may be used\n# kp.geometry = kp.buffer(20000)", "_____no_output_____" ], [ "kp = kp.to_crs(4326)", 
"_____no_output_____" ], [ "# pts = pts.to_crs(4326)\n# pts['lon_4326'] = pts.geometry.x\n# pts['lat_4326'] = pts.geometry.y", "_____no_output_____" ], [ "# pts = pts.to_crs(32642)\n# pts['lon_32642'] = pts.geometry.x\n# pts['lat_32642'] = pts.geometry.y", "_____no_output_____" ], [ "# kp_pts = gpd.clip(pts,kp)", "_____no_output_____" ], [ "def clip_pts(df, polys):\n # Join using 4326\n # Convert to GDF\n if isinstance(polys, gpd.GeoDataFrame) == False:\n polys = polys.result()\n gdf = gpd.GeoDataFrame(\n df, \n geometry=gpd.points_from_xy(df.lon_4326, df.lat_4326)\n ).set_crs(\"EPSG:4326\")\n \n # Clip by extent\n gdf = gpd.clip(gdf, polys)\n \n df = pd.DataFrame(gdf.drop('geometry', axis=1))\n \n \n return df\n ", "_____no_output_____" ], [ "# Broadcast adm3\nkp_dist = client.scatter(kp, broadcast=True)", "_____no_output_____" ], [ "# Distributed clip\nkp_pts = pts.map_partitions(clip_pts, kp_dist)", "_____no_output_____" ], [ "len(kp_pts)", "_____no_output_____" ], [ "kp_pts", "_____no_output_____" ], [ "kp_pts.dtypes", "_____no_output_____" ], [ "kp_pts.head()", "_____no_output_____" ] ], [ [ "#### Export", "_____no_output_____" ] ], [ [ "# pts.drop('geometry',axis=1).to_csv(os.path.join(geo_dir,'Population/HRSL/pak_general_v15_pts.csv'))\nkp_pts.drop('geometry',axis=1).to_csv(os.path.join(geo_dir,'Population/HRSL/kp_general_v15_pts.csv'))", "_____no_output_____" ], [ "pts.to_crs(4326).to_file(os.path.join(geo_dir,'Population/HRSL/pak_general_v15_pts.gpkg'),layer=\"pak_general_v15_4326\",driver='GPKG')", "_____no_output_____" ], [ "pts.to_crs(dcrs_int).to_file(os.path.join(geo_dir,'Population/HRSL/pak_general_v15_pts.gpkg'),layer=f\"pak_general_v15_{dcrs_int}\",driver='GPKG')", "_____no_output_____" ], [ "kp_pts.to_crs(4326).to_file(os.path.join(geo_dir,'Population/HRSL/kp_hrsl_v15_pts.gpkg'),layer=\"kp_general_v15_4326\",driver='GPKG')", "_____no_output_____" ], [ "kp_pts.to_crs(dcrs_int).to_file(os.path.join(geo_dir,'Population/HRSL/kp_hrsl_v15_pts.gpkg'),layer=f\"kp_general_v15_{dcrs_int}\",driver='GPKG')", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ] ]
cbfd02a9a747b5c38dad3dcac970c4240fce27da
17,696
ipynb
Jupyter Notebook
deep-learning-v2-pytorch/intro-to-pytorch/Part 1 - Tensors in PyTorch (Exercises).ipynb
iamdamilola/deep_learning
63ec48a8d9ad1e5acd8cb12caa5123e8e08a1545
[ "MIT" ]
null
null
null
deep-learning-v2-pytorch/intro-to-pytorch/Part 1 - Tensors in PyTorch (Exercises).ipynb
iamdamilola/deep_learning
63ec48a8d9ad1e5acd8cb12caa5123e8e08a1545
[ "MIT" ]
3
2021-08-25T14:41:59.000Z
2022-02-09T23:59:00.000Z
deep-learning-v2-pytorch/intro-to-pytorch/Part 1 - Tensors in PyTorch (Exercises).ipynb
damisparks/ai_learning
63ec48a8d9ad1e5acd8cb12caa5123e8e08a1545
[ "MIT" ]
null
null
null
37.333333
674
0.596632
[ [ [ "# Introduction to Deep Learning with PyTorch\n\nIn this notebook, you'll get introduced to [PyTorch](http://pytorch.org/), a framework for building and training neural networks. PyTorch in a lot of ways behaves like the arrays you love from Numpy. These Numpy arrays, after all, are just tensors. PyTorch takes these tensors and makes it simple to move them to GPUs for the faster processing needed when training neural networks. It also provides a module that automatically calculates gradients (for backpropagation!) and another module specifically for building neural networks. All together, PyTorch ends up being more coherent with Python and the Numpy/Scipy stack compared to TensorFlow and other frameworks.\n\n", "_____no_output_____" ], [ "## Neural Networks\n\nDeep Learning is based on artificial neural networks which have been around in some form since the late 1950s. The networks are built from individual parts approximating neurons, typically called units or simply \"neurons.\" Each unit has some number of weighted inputs. These weighted inputs are summed together (a linear combination) then passed through an activation function to get the unit's output.\n\n<img src=\"assets/simple_neuron.png\" width=400px>\n\nMathematically this looks like: \n\n$$\n\\begin{align}\ny &= f(w_1 x_1 + w_2 x_2 + b) \\\\\ny &= f\\left(\\sum_i w_i x_i +b \\right)\n\\end{align}\n$$\n\nWith vectors this is the dot/inner product of two vectors:\n\n$$\nh = \\begin{bmatrix}\nx_1 \\, x_2 \\cdots x_n\n\\end{bmatrix}\n\\cdot \n\\begin{bmatrix}\n w_1 \\\\\n w_2 \\\\\n \\vdots \\\\\n w_n\n\\end{bmatrix}\n$$", "_____no_output_____" ], [ "## Tensors\n\nIt turns out neural network computations are just a bunch of linear algebra operations on *tensors*, a generalization of matrices. A vector is a 1-dimensional tensor, a matrix is a 2-dimensional tensor, an array with three indices is a 3-dimensional tensor (RGB color images for example). The fundamental data structure for neural networks are tensors and PyTorch (as well as pretty much every other deep learning framework) is built around tensors.\n\n<img src=\"assets/tensor_examples.svg\" width=600px>\n\nWith the basics covered, it's time to explore how we can use PyTorch to build a simple neural network.", "_____no_output_____" ] ], [ [ "# First, import PyTorch\nimport torch", "_____no_output_____" ], [ "def activation(x):\n \"\"\" Sigmoid activation function \n \n Arguments\n ---------\n x: torch.Tensor\n \"\"\"\n return 1/(1+torch.exp(-x))", "_____no_output_____" ], [ "### Generate some data\ntorch.manual_seed(7) # Set the random seed so things are predictable\n\n# Features are 3 random normal variables\nfeatures = torch.randn((1, 5))\n# True weights for our data, random normal variables again\nweights = torch.randn_like(features)\n# and a true bias term\nbias = torch.randn((1, 1))", "_____no_output_____" ] ], [ [ "Above I generated data we can use to get the output of our simple network. This is all just random for now, going forward we'll start using normal data. Going through each relevant line:\n\n`features = torch.randn((1, 5))` creates a tensor with shape `(1, 5)`, one row and five columns, that contains values randomly distributed according to the normal distribution with a mean of zero and standard deviation of one. 
\n\n`weights = torch.randn_like(features)` creates another tensor with the same shape as `features`, again containing values from a normal distribution.\n\nFinally, `bias = torch.randn((1, 1))` creates a single value from a normal distribution.\n\nPyTorch tensors can be added, multiplied, subtracted, etc, just like Numpy arrays. In general, you'll use PyTorch tensors pretty much the same way you'd use Numpy arrays. They come with some nice benefits though such as GPU acceleration which we'll get to later. For now, use the generated data to calculate the output of this simple single layer network. \n> **Exercise**: Calculate the output of the network with input features `features`, weights `weights`, and bias `bias`. Similar to Numpy, PyTorch has a [`torch.sum()`](https://pytorch.org/docs/stable/torch.html#torch.sum) function, as well as a `.sum()` method on tensors, for taking sums. Use the function `activation` defined above as the activation function.", "_____no_output_____" ] ], [ [ "## Calculate the output of this network using the weights and bias tensors\n# Output of the network (prediction) formula\noutput = activation(torch.sum(features * weights) + bias)\noutput", "_____no_output_____" ] ], [ [ "You can do the multiplication and sum in the same operation using a matrix multiplication. In general, you'll want to use matrix multiplications since they are more efficient and accelerated using modern libraries and high-performance computing on GPUs.\n\nHere, we want to do a matrix multiplication of the features and the weights. For this we can use [`torch.mm()`](https://pytorch.org/docs/stable/torch.html#torch.mm) or [`torch.matmul()`](https://pytorch.org/docs/stable/torch.html#torch.matmul) which is somewhat more complicated and supports broadcasting. If we try to do it with `features` and `weights` as they are, we'll get an error\n\n```python\n>> torch.mm(features, weights)\n\n---------------------------------------------------------------------------\nRuntimeError Traceback (most recent call last)\n<ipython-input-13-15d592eb5279> in <module>()\n----> 1 torch.mm(features, weights)\n\nRuntimeError: size mismatch, m1: [1 x 5], m2: [1 x 5] at /Users/soumith/minicondabuild3/conda-bld/pytorch_1524590658547/work/aten/src/TH/generic/THTensorMath.c:2033\n```\n\nAs you're building neural networks in any framework, you'll see this often. Really often. What's happening here is our tensors aren't the correct shapes to perform a matrix multiplication. Remember that for matrix multiplications, the number of columns in the first tensor must equal to the number of rows in the second column. Both `features` and `weights` have the same shape, `(1, 5)`. This means we need to change the shape of `weights` to get the matrix multiplication to work.\n\n**Note:** To see the shape of a tensor called `tensor`, use `tensor.shape`. If you're building neural networks, you'll be using this method often.\n\nThere are a few options here: [`weights.reshape()`](https://pytorch.org/docs/stable/tensors.html#torch.Tensor.reshape), [`weights.resize_()`](https://pytorch.org/docs/stable/tensors.html#torch.Tensor.resize_), and [`weights.view()`](https://pytorch.org/docs/stable/tensors.html#torch.Tensor.view).\n\n* `weights.reshape(a, b)` will return a new tensor with the same data as `weights` with size `(a, b)` sometimes, and sometimes a clone, as in it copies the data to another part of memory.\n* `weights.resize_(a, b)` returns the same tensor with a different shape. 
However, if the new shape results in fewer elements than the original tensor, some elements will be removed from the tensor (but not from memory). If the new shape results in more elements than the original tensor, new elements will be uninitialized in memory. Here I should note that the underscore at the end of the method denotes that this method is performed **in-place**. Here is a great forum thread to [read more about in-place operations](https://discuss.pytorch.org/t/what-is-in-place-operation/16244) in PyTorch.\n* `weights.view(a, b)` will return a new tensor with the same data as `weights` with size `(a, b)`.\n\nI usually use `.view()`, but any of the three methods will work for this. So, now we can reshape `weights` to have five rows and one column with something like `weights.view(5, 1)`.\n\n> **Exercise**: Calculate the output of our little network using matrix multiplication.", "_____no_output_____" ] ], [ [ "## Calculate the output of this network using matrix multiplication\ny = activation(torch.mm(features, weights.view(5, 1)) + bias)\ny", "_____no_output_____" ] ], [ [ "### Stack them up!\n\nThat's how you can calculate the output for a single neuron. The real power of this algorithm happens when you start stacking these individual units into layers and stacks of layers, into a network of neurons. The output of one layer of neurons becomes the input for the next layer. With multiple input units and output units, we now need to express the weights as a matrix.\n\n<img src='assets/multilayer_diagram_weights.png' width=450px>\n\nThe first layer shown on the bottom here are the inputs, understandably called the **input layer**. The middle layer is called the **hidden layer**, and the final layer (on the right) is the **output layer**. We can express this network mathematically with matrices again and use matrix multiplication to get linear combinations for each unit in one operation. For example, the hidden layer ($h_1$ and $h_2$ here) can be calculated \n\n$$\n\\vec{h} = [h_1 \\, h_2] = \n\\begin{bmatrix}\nx_1 \\, x_2 \\cdots \\, x_n\n\\end{bmatrix}\n\\cdot \n\\begin{bmatrix}\n w_{11} & w_{12} \\\\\n w_{21} &w_{22} \\\\\n \\vdots &\\vdots \\\\\n w_{n1} &w_{n2}\n\\end{bmatrix}\n$$\n\nThe output for this small network is found by treating the hidden layer as inputs for the output unit. The network output is expressed simply\n\n$$\ny = f_2 \\! \\left(\\, f_1 \\! \\left(\\vec{x} \\, \\mathbf{W_1}\\right) \\mathbf{W_2} \\right)\n$$", "_____no_output_____" ] ], [ [ "### Generate some data\ntorch.manual_seed(7) # Set the random seed so things are predictable\n\n# Features are 3 random normal variables\nfeatures = torch.randn((1, 3))\n\n# Define the size of each layer in our network\nn_input = features.shape[1] # Number of input units, must match number of input features\nn_hidden = 2 # Number of hidden units \nn_output = 1 # Number of output units\n\n# Weights for inputs to hidden layer\nW1 = torch.randn(n_input, n_hidden)\n# Weights for hidden layer to output layer\nW2 = torch.randn(n_hidden, n_output)\n\n# and bias terms for hidden and output layers\nB1 = torch.randn((1, n_hidden))\nB2 = torch.randn((1, n_output))", "_____no_output_____" ] ], [ [ "> **Exercise:** Calculate the output for this multi-layer network using the weights `W1` & `W2`, and the biases, `B1` & `B2`. ", "_____no_output_____" ] ], [ [ "## Output for multilayer network. 
\nhidden_layer = activation(torch.mm(features, W1) + B1)\noutput_layer = activation(torch.mm(hidden_layer, W2) + B2)\noutput_layer", "_____no_output_____" ] ], [ [ "If you did this correctly, you should see the output `tensor([[ 0.3171]])`.\n\nThe number of hidden units a parameter of the network, often called a **hyperparameter** to differentiate it from the weights and biases parameters. As you'll see later when we discuss training a neural network, the more hidden units a network has, and the more layers, the better able it is to learn from data and make accurate predictions.", "_____no_output_____" ], [ "## Numpy to Torch and back\n\nSpecial bonus section! PyTorch has a great feature for converting between Numpy arrays and Torch tensors. To create a tensor from a Numpy array, use `torch.from_numpy()`. To convert a tensor to a Numpy array, use the `.numpy()` method.", "_____no_output_____" ] ], [ [ "import numpy as np\na = np.random.rand(4,3)\na", "_____no_output_____" ], [ "b = torch.from_numpy(a)\nb", "_____no_output_____" ], [ "b.numpy()", "_____no_output_____" ] ], [ [ "The memory is shared between the Numpy array and Torch tensor, so if you change the values in-place of one object, the other will change as well.", "_____no_output_____" ] ], [ [ "# Multiply PyTorch Tensor by 2, in place\nb.mul_(2)", "_____no_output_____" ], [ "# Numpy array matches new values from Tensor\na", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
cbfd03acaea01db10081426c0e73e677a9499cf2
2,820
ipynb
Jupyter Notebook
examples/reference/containers/bokeh/Overlay.ipynb
ppwadhwa/holoviews
e8e2ec08c669295479f98bb2f46bbd59782786bf
[ "BSD-3-Clause" ]
864
2019-11-13T08:18:27.000Z
2022-03-31T13:36:13.000Z
examples/reference/containers/bokeh/Overlay.ipynb
ppwadhwa/holoviews
e8e2ec08c669295479f98bb2f46bbd59782786bf
[ "BSD-3-Clause" ]
1,117
2019-11-12T16:15:59.000Z
2022-03-30T22:57:59.000Z
examples/reference/containers/bokeh/Overlay.ipynb
ppwadhwa/holoviews
e8e2ec08c669295479f98bb2f46bbd59782786bf
[ "BSD-3-Clause" ]
180
2019-11-19T16:44:44.000Z
2022-03-28T22:49:18.000Z
34.390244
433
0.601064
[ [ [ "<div class=\"contentcontainer med left\" style=\"margin-left: -50px;\">\n<dl class=\"dl-horizontal\">\n <dt>Title</dt> <dd>Overlay Container</dd>\n <dt>Dependencies</dt> <dd>Bokeh</dd>\n <dt>Backends</dt> <dd><a href='../bokeh/Overlay.ipynb'>Bokeh</a></dd> <dd><a href='../matplotlib/Overlay.ipynb'>Matplotlib</a></dd> <dd><a href='../plotly/Overlay.ipynb'>Plotly</a></dd>\n</dl>\n</div>", "_____no_output_____" ] ], [ [ "import numpy as np\nimport holoviews as hv\nhv.extension('bokeh')", "_____no_output_____" ] ], [ [ "A Overlay is a collection of HoloViews objects that are related in some way, to be displayed simultanously, overlaid in the same space. Like [``Layout``](./Layout.ipynb) and unlike other containers such as [``HoloMap``](./HoloMap.ipynb) , [``GridSpace``](./GridSpace.ipynb) and [``NdOverlay``](./NdOverlay.ipynb) a ``Overlay`` is *not* dictionary like: it holds potentially heterogeneous types without any dimensioned keys.\n\n\nA ``Overlay`` cannot contain any other container type other than ``NdOverlay`` but can contain any HoloViews elements. See [Building Composite Objects](../../../user_guide/06-Building_Composite_Objects.ipynb) for more details on how to compose containers. It is best to learn about ``Overlay`` and [``Layout``](./Layout.ipynb) together as they are very closely related objects that share many core concepts.", "_____no_output_____" ], [ "### ``Overlay`` is a heterogeneous collection", "_____no_output_____" ], [ "You can build a ``Overlay`` between any two HoloViews objects (which can have different types) using the ``*`` operator:", "_____no_output_____" ] ], [ [ "xvals = [0.1* i for i in range(100)]\ncurve = hv.Curve((xvals, [np.sin(x) for x in xvals]))\nscatter = hv.Scatter((xvals[::5], np.linspace(0,1,20)))\ncurve * scatter", "_____no_output_____" ] ], [ [ "In this example, we have a ``Overlay`` composed of a ``Curve`` element and a ``Scatter`` element.\n\nFor more information about both ``Overlay`` and ``Layout``, see the [Composing_Elements](../../../user_guide/02-Composing_Elements.ipynb) user guide.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ] ]
cbfd0642d2b13d98a6a64599a46fa60abc7842af
119,522
ipynb
Jupyter Notebook
climate_starter.ipynb
CoryOpie/Weather-Data-
c04658f9aa6622771af5b765d95fc21bb1c24ee8
[ "ADSL" ]
null
null
null
climate_starter.ipynb
CoryOpie/Weather-Data-
c04658f9aa6622771af5b765d95fc21bb1c24ee8
[ "ADSL" ]
null
null
null
climate_starter.ipynb
CoryOpie/Weather-Data-
c04658f9aa6622771af5b765d95fc21bb1c24ee8
[ "ADSL" ]
null
null
null
112.121951
35,688
0.846363
[ [ [ "%matplotlib inline\nfrom matplotlib import style\nstyle.use('fivethirtyeight')\nimport matplotlib.pyplot as plt", "_____no_output_____" ], [ "import numpy as np\nimport pandas as pd\nfrom scipy import stats\nfrom scipy.stats import ttest_ind, ttest_ind_from_stats", "_____no_output_____" ], [ "import datetime as dt\nfrom datetime import datetime,timedelta\nfrom itertools import chain", "_____no_output_____" ] ], [ [ "# Reflect Tables into SQLAlchemy ORM", "_____no_output_____" ] ], [ [ "# Python SQL toolkit and Object Relational Mapper\nimport sqlalchemy\nfrom sqlalchemy.ext.automap import automap_base\nfrom sqlalchemy.orm import Session\nfrom sqlalchemy import create_engine, func, inspect", "_____no_output_____" ], [ "engine = create_engine(\"sqlite:///Resources/hawaii.sqlite\")", "_____no_output_____" ], [ "# reflect an existing database into a new model\nbase = automap_base()\n# reflect the tables\nbase.prepare(engine, reflect=True)", "_____no_output_____" ], [ "# We can view all of the classes that automap found\nbase.classes.keys()", "_____no_output_____" ], [ "# Save references to each table\nmeasurement = base.classes.measurement\nstation = base.classes.station", "_____no_output_____" ], [ "#check columns in each table\ninspector = inspect(engine)\ncolumns = inspector.get_columns(\"measurement\")\n\nfor column in columns:\n print(column[\"name\"], column[\"type\"])", "id INTEGER\nstation TEXT\ndate TEXT\nprcp FLOAT\ntobs FLOAT\n" ], [ "#check columns in each table\ninspector = inspect(engine)\ncolumns = inspector.get_columns(\"station\")\n\nfor column in columns:\n print(column[\"name\"], column[\"type\"])", "id INTEGER\nstation TEXT\nname TEXT\nlatitude FLOAT\nlongitude FLOAT\nelevation FLOAT\n" ] ], [ [ "# Exploratory Climate Analysis", "_____no_output_____" ] ], [ [ "# Create our session (link) from Python to the DB\nsession = Session(engine)", "_____no_output_____" ], [ "# Calculate the date 1 year ago from the last data point in the database\n#need last year before precip data\nlast_date = (engine.execute(\"SELECT date FROM measurement ORDER BY date DESC\").first())[0]\nprint(last_date)\n\nyear, month, day = map(int, last_date.split(\"-\"))\nyear_ago = dt.datetime(year, month, day) - timedelta(365)\n# year_ago = year_ago.strftime(\"%Y-%m-%d\")\nprint(year_ago)\n\n", "2017-08-23\n2016-08-23 00:00:00\n" ], [ "# Design a query to retrieve the last 12 months of precipitation data and plot the results\n\nlast_year_precip = session.query(measurement.date, measurement.prcp).filter(measurement.date >= year_ago).all()\n# print(last_year_precip)\n \n\n# Perform a query to retrieve the data and precipitation scores\ndate = [row[0] for row in last_year_precip]\nprecip = [row[1] for row in last_year_precip]\n\n# Save the query results as a Pandas DataFrame and set the index to the date column\nclimate_df = pd.DataFrame({\"Date\" : date,\n \"Precipitation\" : precip}).set_index(\"Date\")\n# climate_df\n\n\n# Sort the dataframe by date\nclimate_df = climate_df.sort_values(\"Date\")\n# climate_df.head()\n\n# Use Pandas Plotting with Matplotlib to plot the data\nclimate_df.plot(figsize = (10,5))\nplt.xlabel(\"Date\")\n# plt.tick_params(\n# axis=\"x\",\n# which=\"both\",\n# labelbottom=False)\nplt.legend(loc = \"best\")\nplt.show()\n", "_____no_output_____" ], [ "# Use Pandas to calcualte the summary statistics for the precipitation data\nclimate_df.describe()", "_____no_output_____" ], [ "# Design a query to show how many stations are available in this 
dataset?\nsession.query(func.count(station.name)).all()", "_____no_output_____" ], [ "# What are the most active stations? (i.e. what stations have the most rows)?\n# List the stations and the counts in descending order.\nengine.execute(\"SELECT station, count(station) AS count FROM measurement GROUP BY station ORDER BY count desc\").fetchall()", "_____no_output_____" ], [ "# Using the station id from the previous query, calculate the lowest temperature recorded, \n# highest temperature recorded, and average temperature of the most active station?\nengine.execute(\"SELECT min(tobs), max(tobs), avg(tobs) FROM measurement WHERE station = 'USC00519281'\").fetchall()", "_____no_output_____" ], [ "# Choose the station with the highest number of temperature observations.\n# Query the last 12 months of temperature observation data for this station and plot the results as a histogram\ndata = engine.execute(\"SELECT tobs FROM measurement WHERE date >= '2016-8-23' AND station = 'USC00519281'\").fetchall()\ndata = [row[0] for row in data]\nhist_data = pd.DataFrame({\"tobs\": data})\nhist_data.head()", "_____no_output_____" ], [ "hist = hist_data.hist(bins = 12, figsize = (10, 5))\nplt.ylabel(\"Frequency\")\nplt.xlabel(\"Temperature\")\nplt.title(\"\")\nplt.legend([\"tobs\"])\nplt.show()", "_____no_output_____" ] ], [ [ "## Bonus Challenge Assignment", "_____no_output_____" ] ], [ [ "#Average June Temps vs Average December Temps\njune = \"06\"\njune_temp = session.query(measurement.tobs).filter(func.strftime(\"%m\", measurement.date) == june).all()\n\ndec = \"06\"\ndec_temp = session.query(measurement.tobs).filter(func.strftime(\"%m\", measurement.date) == dec).all()\n\n\n#ttest\nstats.ttest_ind(june_temp, dec_temp, equal_var=False)", "_____no_output_____" ], [ "# This function called `calc_temps` will accept start date and end date in the format '%Y-%m-%d' \n# and return the minimum, average, and maximum temperatures for that range of dates\ndef calc_temps(start_date, end_date):\n \"\"\"TMIN, TAVG, and TMAX for a list of dates.\n \n Args:\n start_date (string): A date string in the format %Y-%m-%d\n end_date (string): A date string in the format %Y-%m-%d\n \n Returns:\n TMIN, TAVE, and TMAX\n \"\"\"\n \n return session.query(func.min(measurement.tobs), func.avg(measurement.tobs), func.max(measurement.tobs)).\\\n filter(measurement.date >= start_date).filter(measurement.date <= end_date).all()\n\n# function usage example\nprint(calc_temps('2012-02-28', '2012-03-05'))", "[(62.0, 69.57142857142857, 74.0)]\n" ], [ "# Use your previous function `calc_temps` to calculate the tmin, tavg, and tmax \n# for your trip using the previous year's data for those same dates.\nmy_trip = (calc_temps(\"2017-08-10\", \"2017-08-20\"))\nprint(my_trip)", "[(70.0, 78.42222222222222, 85.0)]\n" ], [ "my_trip_df = pd.DataFrame(my_trip, columns = [\"min\", \"avg\", \"max\"])\nmy_trip_df", "_____no_output_____" ], [ "# Plot the results from your previous query as a bar chart. 
\n# Use \"Trip Avg Temp\" as your Title\n# Use the average temperature for the y value\n# Use the peak-to-peak (tmax-tmin) value as the y error bar (yerr)\nerror = [my_trip_df[\"max\"] - my_trip_df[\"min\"]]\nmy_trip_df.plot(kind=\"bar\", y=\"avg\", yerr=error, title = \"Trip Avg Temp\", color=\"blue\", figsize= (6,4), legend=\"\")\nplt.ylabel(\"Temp (F)\")\nplt.tick_params(\n axis=\"x\",\n which=\"both\",\n labelbottom=False)", "_____no_output_____" ], [ "# Calculate the total amount of rainfall per weather station for your trip dates using the previous year's matching dates.\n# Sort this in descending order by precipitation amount and list the station, name, latitude, longitude, and elevation\nengine.execute(\"SELECT measurement.station, name, latitude, longitude, elevation, sum(prcp) AS total_rainfall \\\n FROM measurement \\\n JOIN station ON measurement.station == station.station \\\n WHERE date BETWEEN '2017-08-10' AND '2017-08-20' \\\n GROUP BY measurement.station ORDER BY total_rainfall DESC\").fetchall()\n", "_____no_output_____" ], [ "# Create a query that will calculate the daily normals \n# (i.e. the averages for tmin, tmax, and tavg for all historic data matching a specific month and day)\n\ndef daily_normals(date):\n \"\"\"Daily Normals.\n \n Args:\n date (str): A date string in the format '%m-%d'\n \n Returns:\n A list of tuples containing the daily normals, tmin, tavg, and tmax\n \n \"\"\"\n \n sel = [func.min(measurement.tobs), func.avg(measurement.tobs), func.max(measurement.tobs)]\n return session.query(*sel).filter(func.strftime(\"%m-%d\", measurement.date) == date).all()\n \ndaily_normals(\"01-01\")", "_____no_output_____" ], [ "# calculate the daily normals for your trip\n# Set the start and end date of the trip\nstart_date = \"2017-08-10\"\nend_date =\"2017-08-20\"\n\n# Use the start and end date to create a range of dates\nvacay_dates = pd.date_range(start_date, end_date).strftime(\"%Y-%m-%d\")\n# vacay_dates\n\n# Stip off the year and save a list of %m-%d strings\nvacay_dates = pd.date_range(start_date, end_date).strftime(\"%m-%d\")\nvacay_dates\n\n\n\n# Loop through the list of %m-%d strings and calculate the normals for each date\n# push each tuple of calculations into a list called `normals`\nnormals = []\nfor date in vacay_dates:\n normal = daily_normals(date)\n normals.append(normal)\n \nnormals", "_____no_output_____" ], [ "# Load the previous query results into a Pandas DataFrame and \nnew_list = [x for x in chain.from_iterable(normals)]\n# new_list\n\nmy_trip_df = pd.DataFrame(new_list, columns = [\"tmin\", \"tavg\", \"tmax\"])\n# my_trip_df\n\n# add the `trip_dates` range as the `date` index\nmy_trip_df[\"date\"] = vacay_dates\nmy_trip_df = my_trip_df.set_index(\"date\")\nmy_trip_df\n\n", "_____no_output_____" ], [ "# Plot the daily normals as an area plot with `stacked=False`\nmy_trip_df.plot(kind = \"area\", stacked=False, figsize = (10,5))\nplt.xticks(rotation= 45)", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbfd179c28b460d5a23d330ef0f58f3a31e01687
9,635
ipynb
Jupyter Notebook
docs/notebooks/atomic/windows/credential_access/SDWIN-190625103712.ipynb
onesorzer0es/Security-Datasets
6a0eec7d9a2ec6026c6ba239ad647c4f59d2a6ef
[ "MIT" ]
294
2020-08-27T01:41:47.000Z
2021-06-28T00:17:15.000Z
docs/notebooks/atomic/windows/credential_access/SDWIN-190625103712.ipynb
onesorzer0es/Security-Datasets
6a0eec7d9a2ec6026c6ba239ad647c4f59d2a6ef
[ "MIT" ]
18
2020-09-01T14:51:13.000Z
2021-06-22T14:12:04.000Z
docs/notebooks/atomic/windows/credential_access/SDWIN-190625103712.ipynb
onesorzer0es/Security-Datasets
6a0eec7d9a2ec6026c6ba239ad647c4f59d2a6ef
[ "MIT" ]
48
2020-08-31T07:30:05.000Z
2021-06-28T00:17:37.000Z
32.22408
303
0.544369
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
cbfd24532e2caed8b04cc46aaaf33e1030351f25
299,695
ipynb
Jupyter Notebook
RGB-HSI.ipynb
jrodrigopuca/procesamiento-img
52078b80e88b548f4d034cc931deb711f4d0edbb
[ "MIT" ]
null
null
null
RGB-HSI.ipynb
jrodrigopuca/procesamiento-img
52078b80e88b548f4d034cc931deb711f4d0edbb
[ "MIT" ]
null
null
null
RGB-HSI.ipynb
jrodrigopuca/procesamiento-img
52078b80e88b548f4d034cc931deb711f4d0edbb
[ "MIT" ]
null
null
null
643.122318
145,200
0.942935
[ [ [ "import numpy as np\nimport cv2\nimport matplotlib.pyplot as plt\nimport math", "_____no_output_____" ], [ "def rgb2hsi(rgb):\n # separar\n R,G,B= cv2.split(rgb)\n # normalizar\n R =R/255\n G =G/255\n B =B/255\n # cantidad de elementos\n x=R.shape[0]\n y=R.shape[1]\n # crear arrays\n r=np.empty([x,y])\n g=np.empty([x,y])\n b=np.empty([x,y])\n H=np.empty([x,y])\n S=np.empty([x,y])\n I=np.empty([x,y])\n\n # recorrer\n for i in range(0, x):\n for j in range(0,y):\n # calcular rgb\n divisor=R[i,j]+G[i,j]+B[i,j]\n I[i,j]=divisor/3.0\n if (divisor != 0.0):\n r[i,j]=R[i,j]/divisor\n g[i,j]=G[i,j]/divisor\n b[i,j]=B[i,j]/divisor\n \n # calcular RGB\n if (R[i,j]==G[i,j]) and (G[i,j]==B[i,j]):\n H[i,j]=0\n S[i,j]=0\n else:\n argum=(R[i,j]-G[i,j])*(R[i,j]-G[i,j])+(R[i,j]-B[i,j])*(G[i,j]-B[i,j])\n num=0.5*((R[i,j]-G[i,j]) + (R[i,j]-B[i,j]))\n w=num/math.sqrt(argum)\n if (w>1): w=1\n if (w<-1): w=-1\n \n H[i,j]=math.acos(w)\n if H[i,j] < 0:\n print('b')\n break\n \n if B[i,j] > G[i,j]:\n H[i,j]=2*math.pi-H[i,j]\n \n if (r[i,j] <= g[i,j]) & (r[i,j] <= b[i,j]): \n S[i,j]=1-3*r[i,j]\n if (g[i,j] <= r[i,j]) & (g[i,j] <= b[i,j]): \n S[i,j]=1-3*g[i,j]\n if (b[i,j] <= r[i,j]) & (b[i,j] <= g[i,j]): \n S[i,j]=1-3*b[i,j]\n \n #H*=179\n #S*=255\n #I*=255\n hsi=cv2.merge([H,S,I])\n return hsi", "_____no_output_____" ], [ "def hsi2rgb(hsi):\n H,S,I = cv2.split(hsi)\n #H=H/179\n #S=S/255\n #I=I/255\n x=H.shape[0]\n y=H.shape[1]\n R=np.empty([x,y])\n G=np.empty([x,y])\n B=np.empty([x,y])\n r=np.empty([x,y])\n g=np.empty([x,y])\n b=np.empty([x,y])\n\n \n for i in range(0, x):\n for j in range(0,y):\n if (S[i,j] >1): S[i,j]=1\n if (I[i,j] >1): I[i,j]=1\n if (S[i,j] ==0): \n R[i,j]=I[i,j]\n G[i,j]=I[i,j]\n B[i,j]=I[i,j]\n else:\n ums=(1-S[i,j])/3\n if (H[i,j]>=0) and (H[i,j]<np.radians(120)):\n b[i,j]=ums\n r[i,j]= 1/3*(1+(S[i,j]*np.cos(H[i,j])/np.cos(np.radians(60)-H[i,j])))\n g[i,j]=1-r[i,j]-b[i,j]\n elif (H[i,j]>=np.radians(120)) and (H[i,j]<np.radians(240)):\n H[i,j]-=np.radians(120)\n r[i,j]=ums\n g[i,j]=1/3*(1+(S[i,j]*np.cos(H[i,j])/np.cos(np.radians(60)-H[i,j])))\n b[i,j]=1-r[i,j]-g[i,j]\n elif (H[i,j]>=np.radians(240)) and (H[i,j]<np.radians(360)):\n H[i,j]-=np.radians(240)\n g[i,j]=ums\n b[i,j]=1/3*(1+(S[i,j]*np.cos(H[i,j])/np.cos(np.radians(60)-H[i,j])))\n r[i,j]=1-g[i,j]-b[i,j]\n else:\n print(\"fuera de rango\")\n break\n if (r[i,j]<0): r[i,j]=0\n if (g[i,j]<0): g[i,j]=0\n if (b[i,j]<0): b[i,j]=0\n R[i,j]=3*I[i,j]*r[i,j]\n G[i,j]=3*I[i,j]*g[i,j]\n B[i,j]=3*I[i,j]*b[i,j]\n if (R[i,j]>1): R[i,j]=1\n if (G[i,j]>1): G[i,j]=1\n if (B[i,j]>1): B[i,j]=1 \n rgb=cv2.merge([R,G,B])*255\n return rgb.astype(np.uint8)", "_____no_output_____" ], [ "#Imagen Original\nimagen = cv2.imread(\"img/5.jpg\")\nmi_rgb= cv2.cvtColor(imagen, cv2.COLOR_BGR2RGB)\nplt.imshow(mi_rgb)", "_____no_output_____" ], [ "# Realizar conversión\nmi_hsi= rgb2hsi(mi_rgb) # RGB a HSI \nh,s,i = cv2.split(mi_hsi)\n\n# Realizar el cambio de color\nh=np.where((h>=640/179) & (h<=822/179),30/179,h)\nmi_hsi=cv2.merge([h,s,i])\n\nn_rgb= hsi2rgb(mi_hsi) # HSI a RGB\nplt.imshow(n_rgb)\n\n# Guardar\nfinal=cv2.cvtColor(n_rgb, cv2.COLOR_RGB2BGR)\ncv2.imwrite('result/hsi.png',final)", "_____no_output_____" ], [ "video= cv2.VideoCapture('video/tafirol.mp4')\n\nfps=video.get(cv2.CAP_PROP_FPS)\ncodec =cv2.VideoWriter_fourcc(*'XVID')\nsize = (int(video.get(cv2.CAP_PROP_FRAME_WIDTH)),\n int(video.get(cv2.CAP_PROP_FRAME_HEIGHT)))\nnuevoVideo= cv2.VideoWriter('result/hsi.mp4',codec,fps,size) \n\ni=0\nsuccess, frame2 = video.read()\nwhile success:\n i+=1\n # hacemos 
el cambio solo de un segmento del video\n if (i>=53) & (i<224):\n print(i,\"trabajando\")\n mi_rgb= cv2.cvtColor(frame2, cv2.COLOR_BGR2RGB)\n mi_hsi= rgb2hsi(mi_rgb) # RGB a HSI \n h,s,j = cv2.split(mi_hsi)\n h=np.where((h>=640/179) & (h<=822/179),30/179,h)\n nuevo_hsi= cv2.merge([h,s,j])\n nuevo_rgb= hsi2rgb(nuevo_hsi)\n nuevo_bgr= cv2.cvtColor(nuevo_rgb, cv2.COLOR_RGB2BGR)\n nuevoVideo.write(nuevo_bgr)\n success, frame2 = video.read()\n\n \nvideo.release()\nnuevoVideo.release()", "53 trabajando\n54 trabajando\n55 trabajando\n56 trabajando\n57 trabajando\n58 trabajando\n59 trabajando\n60 trabajando\n61 trabajando\n62 trabajando\n63 trabajando\n64 trabajando\n65 trabajando\n66 trabajando\n67 trabajando\n68 trabajando\n69 trabajando\n70 trabajando\n71 trabajando\n72 trabajando\n73 trabajando\n74 trabajando\n75 trabajando\n76 trabajando\n77 trabajando\n78 trabajando\n79 trabajando\n80 trabajando\n81 trabajando\n82 trabajando\n83 trabajando\n84 trabajando\n85 trabajando\n86 trabajando\n87 trabajando\n88 trabajando\n89 trabajando\n90 trabajando\n91 trabajando\n92 trabajando\n93 trabajando\n94 trabajando\n95 trabajando\n96 trabajando\n97 trabajando\n98 trabajando\n99 trabajando\n100 trabajando\n101 trabajando\n102 trabajando\n103 trabajando\n104 trabajando\n105 trabajando\n106 trabajando\n107 trabajando\n108 trabajando\n109 trabajando\n110 trabajando\n111 trabajando\n112 trabajando\n113 trabajando\n114 trabajando\n115 trabajando\n116 trabajando\n117 trabajando\n118 trabajando\n119 trabajando\n120 trabajando\n121 trabajando\n122 trabajando\n123 trabajando\n124 trabajando\n125 trabajando\n126 trabajando\n127 trabajando\n128 trabajando\n129 trabajando\n130 trabajando\n131 trabajando\n132 trabajando\n133 trabajando\n134 trabajando\n135 trabajando\n136 trabajando\n137 trabajando\n138 trabajando\n139 trabajando\n140 trabajando\n141 trabajando\n142 trabajando\n143 trabajando\n144 trabajando\n145 trabajando\n146 trabajando\n147 trabajando\n148 trabajando\n149 trabajando\n150 trabajando\n151 trabajando\n152 trabajando\n153 trabajando\n154 trabajando\n155 trabajando\n156 trabajando\n157 trabajando\n158 trabajando\n159 trabajando\n160 trabajando\n161 trabajando\n162 trabajando\n163 trabajando\n164 trabajando\n165 trabajando\n166 trabajando\n167 trabajando\n168 trabajando\n169 trabajando\n170 trabajando\n171 trabajando\n172 trabajando\n173 trabajando\n174 trabajando\n175 trabajando\n176 trabajando\n177 trabajando\n178 trabajando\n179 trabajando\n180 trabajando\n181 trabajando\n182 trabajando\n183 trabajando\n184 trabajando\n185 trabajando\n186 trabajando\n187 trabajando\n188 trabajando\n189 trabajando\n190 trabajando\n191 trabajando\n192 trabajando\n193 trabajando\n194 trabajando\n195 trabajando\n196 trabajando\n197 trabajando\n198 trabajando\n199 trabajando\n200 trabajando\n201 trabajando\n202 trabajando\n203 trabajando\n204 trabajando\n205 trabajando\n206 trabajando\n207 trabajando\n208 trabajando\n209 trabajando\n210 trabajando\n211 trabajando\n212 trabajando\n213 trabajando\n214 trabajando\n215 trabajando\n216 trabajando\n217 trabajando\n218 trabajando\n219 trabajando\n220 trabajando\n221 trabajando\n222 trabajando\n223 trabajando\n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code" ] ]
cbfd2d44d9223db220b66eaea781ef142a624e15
15,495
ipynb
Jupyter Notebook
some_json_wrangling.ipynb
andrewm4894/colabs
1887e4ef4b7ec897ad16f5176e03ef196b06ef80
[ "Apache-2.0" ]
null
null
null
some_json_wrangling.ipynb
andrewm4894/colabs
1887e4ef4b7ec897ad16f5176e03ef196b06ef80
[ "Apache-2.0" ]
null
null
null
some_json_wrangling.ipynb
andrewm4894/colabs
1887e4ef4b7ec897ad16f5176e03ef196b06ef80
[ "Apache-2.0" ]
1
2022-03-01T09:26:03.000Z
2022-03-01T09:26:03.000Z
50.970395
1,881
0.397548
[ [ [ "<a href=\"https://colab.research.google.com/github/andrewm4894/colabs/blob/master/some_json_wrangling.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ] ], [ [ "import pandas as pd\n\ndata = [{\"event_date\":\"20201107\",\"event_timestamp\":\"1604801718108000\",\"event_name\":\"session_start\",\"event_params\":[{\"key\":\"firebase_event_origin\",\"value\":{\"string_value\":\"auto\"}},{\"key\":\"ga_session_id\",\"value\":{\"int_value\":\"1604801718\"}},{\"key\":\"engaged_session_event\",\"value\":{\"int_value\":\"1\"}},{\"key\":\"session_engaged\",\"value\":{\"int_value\":\"1\"}},{\"key\":\"ga_session_number\",\"value\":{\"int_value\":\"8\"}}],\"event_previous_timestamp\":\"1604785674744000\",\"event_bundle_sequence_id\":\"13\",\"event_server_timestamp_offset\":\"754702\",\"user_pseudo_id\":\"cb052c8ce7b261aecf783ce043089fb3\",\"user_properties\":[{\"key\":\"ga_session_id\",\"value\":{\"int_value\":\"1604801718\",\"set_timestamp_micros\":\"1604801718108000\"}},{\"key\":\"first_open_time\",\"value\":{\"int_value\":\"1562979600000\",\"set_timestamp_micros\":\"1562977643627000\"}},{\"key\":\"ga_session_number\",\"value\":{\"int_value\":\"8\",\"set_timestamp_micros\":\"1604801718108000\"}}],\"user_first_touch_timestamp\":\"1562977643627000\",\"device\":{\"category\":\"mobile\",\"mobile_brand_name\":\"Google\",\"mobile_model_name\":\"Pixel\",\"mobile_os_hardware_model\":\"Pixel\",\"operating_system\":\"ANDROID\",\"operating_system_version\":\"10\",\"language\":\"en-us\",\"is_limited_ad_tracking\":\"No\",\"time_zone_offset_seconds\":\"-21600\"},\"geo\":{\"continent\":\"Americas\",\"country\":\"United States\",\"region\":\"Texas\",\"city\":\"Austin\",\"sub_continent\":\"Northern America\",\"metro\":\"(not 
set)\"},\"app_info\":{\"id\":\"org.livingletter.hymnal\",\"version\":\"1.1.7\",\"firebase_app_id\":\"1:76837103840:android:e1d753a7fbfeeaac\",\"install_source\":\"com.android.vending\"},\"traffic_source\":{\"medium\":\"organic\",\"source\":\"google-play\"},\"stream_id\":\"1440534155\",\"platform\":\"ANDROID\",\"items\":[]},\n{\"event_date\":\"20201107\",\"event_timestamp\":\"1604785674744000\",\"event_name\":\"session_start\",\"event_params\":[{\"key\":\"ga_session_number\",\"value\":{\"int_value\":\"7\"}},{\"key\":\"firebase_event_origin\",\"value\":{\"string_value\":\"auto\"}},{\"key\":\"session_engaged\",\"value\":{\"int_value\":\"1\"}},{\"key\":\"engaged_session_event\",\"value\":{\"int_value\":\"1\"}},{\"key\":\"ga_session_id\",\"value\":{\"int_value\":\"1604785674\"}}],\"event_previous_timestamp\":\"1604680637311000\",\"event_bundle_sequence_id\":\"12\",\"event_server_timestamp_offset\":\"718754\",\"user_pseudo_id\":\"cb052c8ce7b261aecf783ce043089fb3\",\"user_properties\":[{\"key\":\"ga_session_number\",\"value\":{\"int_value\":\"7\",\"set_timestamp_micros\":\"1604785674744000\"}},{\"key\":\"ga_session_id\",\"value\":{\"int_value\":\"1604785674\",\"set_timestamp_micros\":\"1604785674744000\"}},{\"key\":\"first_open_time\",\"value\":{\"int_value\":\"1562979600000\",\"set_timestamp_micros\":\"1562977643627000\"}}],\"user_first_touch_timestamp\":\"1562977643627000\",\"device\":{\"category\":\"mobile\",\"mobile_brand_name\":\"Google\",\"mobile_model_name\":\"Pixel\",\"mobile_os_hardware_model\":\"Pixel\",\"operating_system\":\"ANDROID\",\"operating_system_version\":\"10\",\"language\":\"en-us\",\"is_limited_ad_tracking\":\"No\",\"time_zone_offset_seconds\":\"-21600\"},\"geo\":{\"continent\":\"Americas\",\"country\":\"United States\",\"region\":\"Texas\",\"city\":\"Austin\",\"sub_continent\":\"Northern America\",\"metro\":\"(not set)\"},\"app_info\":{\"id\":\"org.livingletter.hymnal\",\"version\":\"1.1.7\",\"firebase_app_id\":\"1:76837103840:android:e1d753a7fbfeeaac\",\"install_source\":\"com.android.vending\"},\"traffic_source\":{\"medium\":\"organic\",\"source\":\"google-play\"},\"stream_id\":\"1440534155\",\"platform\":\"ANDROID\",\"items\":[]}]\n\ndf = pd.json_normalize(\n data, \n record_path='event_params',\n record_prefix='event_params_', \n meta=['event_date', 'event_timestamp', 'event_name','device']\n )\ndf_device = pd.json_normalize(df[\"device\"]).add_prefix('device_')\ndf = pd.concat([df,df_device], axis=1)\ndel df['device']\ndisplay(df)", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code" ] ]
cbfd46eacc65f6a6a17b1296573f075d0fca8ee9
26,953
ipynb
Jupyter Notebook
casos-graves-covid-19-recife/Etapa_1.ipynb
Flavio-Varejao/Projeto-ETL-1
9fde9c9fbcbc278ee82695375643a8034e440ce9
[ "MIT" ]
null
null
null
casos-graves-covid-19-recife/Etapa_1.ipynb
Flavio-Varejao/Projeto-ETL-1
9fde9c9fbcbc278ee82695375643a8034e440ce9
[ "MIT" ]
null
null
null
casos-graves-covid-19-recife/Etapa_1.ipynb
Flavio-Varejao/Projeto-ETL-1
9fde9c9fbcbc278ee82695375643a8034e440ce9
[ "MIT" ]
null
null
null
36.033422
157
0.36534
[ [ [ "# Etapa 1 - Extração e Validação dos Dados", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport pandera as pa", "_____no_output_____" ], [ "#Criação do dataframe e checagem dos dados\ndf = pd.read_csv(\"93f4e8c2-430f-4142-86b3-96dab4905b4e.csv\", parse_dates=['data_inicio_primeiros_sintomas','data_notificacao','data_obito'])\ndf.head(10)", "_____no_output_____" ], [ "#Verificação dos tipos de dados\ndf.dtypes", "_____no_output_____" ], [ "#Observações:\n\n#A coluna 'idade' permanecerá como 'object' porque também aceita os meses como idade, conforme o dicionário da base de dados (casosgravescovid.json)\n\n#A coluna 'etnia' será convertida para 'object', conforme o dicionário da base de dados (casosgravescovid.json)\n\n#A coluna 'data_inicio_primeiros_sintomas' deveria ser do tipo datetime, conforme registrado no dicionário da\n#base de dados (casosgravescovid.json), mas há inconsistências nas células preenchidas, como por exemplo uma data com \n#seguinte formato: '00/01/1900'. Logo, essas células serão tratadas na etapa de transformação.\n\n#As colunas 'data_notificacao' e 'data_obito' serão convertidas para 'datetime', conforme o dicionário da base de dados (casosgravescovid.json)", "_____no_output_____" ], [ "#Inclusão do parâmetro 'parse_dates' no dataframe", "_____no_output_____" ], [ "df.dtypes", "_____no_output_____" ], [ "#Validação dos dados", "_____no_output_____" ], [ "#O parâmetro 'nullable=True' foi adcionado em algumas colunas para validar os campos nulos\n\n#As colunas 'etnia' e 'data_inicio_primeiros_sintomas' serão validadas mais adiante na etapa de limpeza", "_____no_output_____" ], [ "schema = pa.DataFrameSchema(\n columns = {\n \"_id\": pa.Column(pa.Int),\n \"sexo\": pa.Column(pa.String),\n \"idade\": pa.Column(pa.String),\n \"raca\": pa.Column(pa.String, nullable=True),\n #\"etnia\": pa.Column(pa.String),\n \"morbidades_previas\": pa.Column(pa.String, nullable=True),\n \"outras_morbidades_previas\": pa.Column(pa.String, nullable=True),\n \"evolucao\": pa.Column(pa.String, nullable=True),\n #\"data_inicio_primeiros_sintomas\": pa.Column(pa.DateTime),\n \"data_notificacao\": pa.Column(pa.DateTime),\n \"data_obito\": pa.Column(pa.DateTime, nullable=True),\n \"classificacao_final\": pa.Column(pa.String)\n }\n)", "_____no_output_____" ], [ "schema.validate(df)", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbfd48e5747f16f7501b0f60df003ca1c82f9608
23,443
ipynb
Jupyter Notebook
03 Stats/17 Project Investigate AB/Analyze_ab_test_results_notebook.ipynb
Alashmony/DA_Udacity
12615a1d50be6b8260f021f62b3d4ec34ecc06a3
[ "Unlicense" ]
null
null
null
03 Stats/17 Project Investigate AB/Analyze_ab_test_results_notebook.ipynb
Alashmony/DA_Udacity
12615a1d50be6b8260f021f62b3d4ec34ecc06a3
[ "Unlicense" ]
null
null
null
03 Stats/17 Project Investigate AB/Analyze_ab_test_results_notebook.ipynb
Alashmony/DA_Udacity
12615a1d50be6b8260f021f62b3d4ec34ecc06a3
[ "Unlicense" ]
null
null
null
29.30375
574
0.602696
[ [ [ "## Analyze A/B Test Results\n\nThis project will assure you have mastered the subjects covered in the statistics lessons. The hope is to have this project be as comprehensive of these topics as possible. Good luck!\n\n## Table of Contents\n- [Introduction](#intro)\n- [Part I - Probability](#probability)\n- [Part II - A/B Test](#ab_test)\n- [Part III - Regression](#regression)\n\n\n<a id='intro'></a>\n### Introduction\n\nA/B tests are very commonly performed by data analysts and data scientists. It is important that you get some practice working with the difficulties of these \n\nFor this project, you will be working to understand the results of an A/B test run by an e-commerce website. Your goal is to work through this notebook to help the company understand if they should implement the new page, keep the old page, or perhaps run the experiment longer to make their decision.\n\n**As you work through this notebook, follow along in the classroom and answer the corresponding quiz questions associated with each question.** The labels for each classroom concept are provided for each question. This will assure you are on the right track as you work through the project, and you can feel more confident in your final submission meeting the criteria. As a final check, assure you meet all the criteria on the [RUBRIC](https://review.udacity.com/#!/projects/37e27304-ad47-4eb0-a1ab-8c12f60e43d0/rubric).\n\n<a id='probability'></a>\n#### Part I - Probability\n\nTo get started, let's import our libraries.", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport numpy as np\nimport random\nimport matplotlib.pyplot as plt\n%matplotlib inline\n#We are setting the seed to assure you get the same answers on quizzes as we set up\nrandom.seed(42)", "_____no_output_____" ] ], [ [ "`1.` Now, read in the `ab_data.csv` data. Store it in `df`. **Use your dataframe to answer the questions in Quiz 1 of the classroom.**\n\na. Read in the dataset and take a look at the top few rows here:", "_____no_output_____" ], [ "b. Use the below cell to find the number of rows in the dataset.", "_____no_output_____" ], [ "c. The number of unique users in the dataset.", "_____no_output_____" ], [ "d. The proportion of users converted.", "_____no_output_____" ], [ "e. The number of times the `new_page` and `treatment` don't line up.", "_____no_output_____" ], [ "f. Do any of the rows have missing values?", "_____no_output_____" ], [ "`2.` For the rows where **treatment** is not aligned with **new_page** or **control** is not aligned with **old_page**, we cannot be sure if this row truly received the new or old page. Use **Quiz 2** in the classroom to provide how we should handle these rows. \n\na. Now use the answer to the quiz to create a new dataset that meets the specifications from the quiz. Store your new dataframe in **df2**.", "_____no_output_____" ] ], [ [ "# Double Check all of the correct rows were removed - this should be 0\ndf2[((df2['group'] == 'treatment') == (df2['landing_page'] == 'new_page')) == False].shape[0]", "_____no_output_____" ] ], [ [ "`3.` Use **df2** and the cells below to answer questions for **Quiz3** in the classroom.", "_____no_output_____" ], [ "a. How many unique **user_id**s are in **df2**?", "_____no_output_____" ], [ "b. There is one **user_id** repeated in **df2**. What is it?", "_____no_output_____" ], [ "c. What is the row information for the repeat **user_id**? ", "_____no_output_____" ], [ "d. 
Remove **one** of the rows with a duplicate **user_id**, but keep your dataframe as **df2**.", "_____no_output_____" ], [ "`4.` Use **df2** in the below cells to answer the quiz questions related to **Quiz 4** in the classroom.\n\na. What is the probability of an individual converting regardless of the page they receive?", "_____no_output_____" ], [ "b. Given that an individual was in the `control` group, what is the probability they converted?", "_____no_output_____" ], [ "c. Given that an individual was in the `treatment` group, what is the probability they converted?", "_____no_output_____" ], [ "d. What is the probability that an individual received the new page?", "_____no_output_____" ], [ "e. Consider your results from a. through d. above, and explain below whether you think there is sufficient evidence to say that the new treatment page leads to more conversions.", "_____no_output_____" ], [ "**Your answer goes here.**", "_____no_output_____" ], [ "<a id='ab_test'></a>\n### Part II - A/B Test\n\nNotice that because of the time stamp associated with each event, you could technically run a hypothesis test continuously as each observation was observed. \n\nHowever, then the hard question is do you stop as soon as one page is considered significantly better than another or does it need to happen consistently for a certain amount of time? How long do you run to render a decision that neither page is better than another? \n\nThese questions are the difficult parts associated with A/B tests in general. \n\n\n`1.` For now, consider you need to make the decision just based on all the data provided. If you want to assume that the old page is better unless the new page proves to be definitely better at a Type I error rate of 5%, what should your null and alternative hypotheses be? You can state your hypothesis in terms of words or in terms of **$p_{old}$** and **$p_{new}$**, which are the converted rates for the old and new pages.", "_____no_output_____" ], [ "**Put your answer here.**", "_____no_output_____" ], [ "`2.` Assume under the null hypothesis, $p_{new}$ and $p_{old}$ both have \"true\" success rates equal to the **converted** success rate regardless of page - that is $p_{new}$ and $p_{old}$ are equal. Furthermore, assume they are equal to the **converted** rate in **ab_data.csv** regardless of the page. <br><br>\n\nUse a sample size for each page equal to the ones in **ab_data.csv**. <br><br>\n\nPerform the sampling distribution for the difference in **converted** between the two pages over 10,000 iterations of calculating an estimate from the null. <br><br>\n\nUse the cells below to provide the necessary parts of this simulation. If this doesn't make complete sense right now, don't worry - you are going to work through the problems below to complete this problem. You can use **Quiz 5** in the classroom to make sure you are on the right track.<br><br>", "_____no_output_____" ], [ "a. What is the **convert rate** for $p_{new}$ under the null? ", "_____no_output_____" ], [ "b. What is the **convert rate** for $p_{old}$ under the null? <br><br>", "_____no_output_____" ], [ "c. What is $n_{new}$?", "_____no_output_____" ], [ "d. What is $n_{old}$?", "_____no_output_____" ], [ "e. Simulate $n_{new}$ transactions with a convert rate of $p_{new}$ under the null. Store these $n_{new}$ 1's and 0's in **new_page_converted**.", "_____no_output_____" ], [ "f. Simulate $n_{old}$ transactions with a convert rate of $p_{old}$ under the null. 
Store these $n_{old}$ 1's and 0's in **old_page_converted**.", "_____no_output_____" ], [ "g. Find $p_{new}$ - $p_{old}$ for your simulated values from part (e) and (f).", "_____no_output_____" ], [ "h. Simulate 10,000 $p_{new}$ - $p_{old}$ values using this same process similarly to the one you calculated in parts **a. through g.** above. Store all 10,000 values in a numpy array called **p_diffs**.", "_____no_output_____" ], [ "i. Plot a histogram of the **p_diffs**. Does this plot look like what you expected? Use the matching problem in the classroom to assure you fully understand what was computed here.", "_____no_output_____" ], [ "j. What proportion of the **p_diffs** are greater than the actual difference observed in **ab_data.csv**?", "_____no_output_____" ], [ "k. In words, explain what you just computed in part **j.** What is this value called in scientific studies? What does this value mean in terms of whether or not there is a difference between the new and old pages?", "_____no_output_____" ], [ "**Put your answer here.**", "_____no_output_____" ], [ "l. We could also use a built-in to achieve similar results. Though using the built-in might be easier to code, the above portions are a walkthrough of the ideas that are critical to correctly thinking about statistical significance. Fill in the below to calculate the number of conversions for each page, as well as the number of individuals who received each page. Let `n_old` and `n_new` refer the the number of rows associated with the old page and new pages, respectively.", "_____no_output_____" ] ], [ [ "import statsmodels.api as sm\n\nconvert_old = \nconvert_new = \nn_old = \nn_new = ", "_____no_output_____" ] ], [ [ "m. Now use `stats.proportions_ztest` to compute your test statistic and p-value. [Here](http://knowledgetack.com/python/statsmodels/proportions_ztest/) is a helpful link on using the built in.", "_____no_output_____" ], [ "n. What do the z-score and p-value you computed in the previous question mean for the conversion rates of the old and new pages? Do they agree with the findings in parts **j.** and **k.**?", "_____no_output_____" ], [ "**Put your answer here.**", "_____no_output_____" ], [ "<a id='regression'></a>\n### Part III - A regression approach\n\n`1.` In this final part, you will see that the result you acheived in the previous A/B test can also be acheived by performing regression.<br><br>\n\na. Since each row is either a conversion or no conversion, what type of regression should you be performing in this case?", "_____no_output_____" ], [ "**Put your answer here.**", "_____no_output_____" ], [ "b. The goal is to use **statsmodels** to fit the regression model you specified in part **a.** to see if there is a significant difference in conversion based on which page a customer receives. However, you first need to create a column for the intercept, and create a dummy variable column for which page each user received. Add an **intercept** column, as well as an **ab_page** column, which is 1 when an individual receives the **treatment** and 0 if **control**.", "_____no_output_____" ], [ "c. Use **statsmodels** to import your regression model. Instantiate the model, and fit the model using the two columns you created in part **b.** to predict whether or not an individual converts.", "_____no_output_____" ], [ "d. Provide the summary of your model below, and use it as necessary to answer the following questions.", "_____no_output_____" ], [ "e. What is the p-value associated with **ab_page**? 
Why does it differ from the value you found in **Part II**?<br><br> **Hint**: What are the null and alternative hypotheses associated with your regression model, and how do they compare to the null and alternative hypotheses in the **Part II**?", "_____no_output_____" ], [ "**Put your answer here.**", "_____no_output_____" ], [ "f. Now, you are considering other things that might influence whether or not an individual converts. Discuss why it is a good idea to consider other factors to add into your regression model. Are there any disadvantages to adding additional terms into your regression model?", "_____no_output_____" ], [ "**Put your answer here.**", "_____no_output_____" ], [ "g. Now along with testing if the conversion rate changes for different pages, also add an effect based on which country a user lives. You will need to read in the **countries.csv** dataset and merge together your datasets on the approporiate rows. [Here](https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.join.html) are the docs for joining tables. \n\nDoes it appear that country had an impact on conversion? Don't forget to create dummy variables for these country columns - **Hint: You will need two columns for the three dummy variables.** Provide the statistical output as well as a written response to answer this question.", "_____no_output_____" ] ], [ [ "countries_df = pd.read_csv('./countries.csv')\ndf_new = countries_df.set_index('user_id').join(df2.set_index('user_id'), how='inner')", "_____no_output_____" ], [ "### Create the necessary dummy variables", "_____no_output_____" ] ], [ [ "h. Though you have now looked at the individual factors of country and page on conversion, we would now like to look at an interaction between page and country to see if there significant effects on conversion. Create the necessary additional columns, and fit the new model. \n\nProvide the summary results, and your conclusions based on the results.", "_____no_output_____" ] ], [ [ "### Fit Your Linear Model And Obtain the Results", "_____no_output_____" ] ], [ [ "<a id='conclusions'></a>\n## Conclusions\n\nCongratulations on completing the project! \n\n### Gather Submission Materials\n\nOnce you are satisfied with the status of your Notebook, you should save it in a format that will make it easy for others to read. You can use the __File -> Download as -> HTML (.html)__ menu to save your notebook as an .html file. If you are working locally and get an error about \"No module name\", then open a terminal and try installing the missing module using `pip install <module_name>` (don't include the \"<\" or \">\" or any words following a period in the module name).\n\nYou will submit both your original Notebook and an HTML or PDF copy of the Notebook for review. There is no need for you to include any data files with your submission. If you made reference to other websites, books, and other resources to help you in solving tasks in the project, make sure that you document them. It is recommended that you either add a \"Resources\" section in a Markdown cell at the end of the Notebook report, or you can include a `readme.txt` file documenting your sources.\n\n### Submit the Project\n\nWhen you're ready, click on the \"Submit Project\" button to go to the project submission page. You can submit your files as a .zip archive or you can link to a GitHub repository containing your project files. If you go with GitHub, note that your submission will be a snapshot of the linked repository at time of submission. 
It is recommended that you keep each project in a separate repository to avoid any potential confusion: if a reviewer gets multiple folders representing multiple projects, there might be confusion regarding what project is to be evaluated.\n\nIt can take us up to a week to grade the project, but in most cases it is much faster. You will get an email once your submission has been reviewed. If you are having any problems submitting your project or wish to check on the status of your submission, please email us at [email protected]. In the meantime, you should feel free to continue on with your learning journey by beginning the next module in the program.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
cbfd4fe772955812b52d66d5eddfdb877a1a2f12
16,830
ipynb
Jupyter Notebook
code/chap18.ipynb
arunkhattri/ModSimPy
f90be88fd401edf11d16c72b11934f1e7160aeb5
[ "MIT" ]
1
2018-10-04T21:43:09.000Z
2018-10-04T21:43:09.000Z
code/chap18.ipynb
arunkhattri/ModSimPy
f90be88fd401edf11d16c72b11934f1e7160aeb5
[ "MIT" ]
null
null
null
code/chap18.ipynb
arunkhattri/ModSimPy
f90be88fd401edf11d16c72b11934f1e7160aeb5
[ "MIT" ]
null
null
null
22.774019
363
0.522638
[ [ [ "# Modeling and Simulation in Python\n\nChapter 18\n\nCopyright 2017 Allen Downey\n\nLicense: [Creative Commons Attribution 4.0 International](https://creativecommons.org/licenses/by/4.0)\n", "_____no_output_____" ] ], [ [ "# Configure Jupyter so figures appear in the notebook\n%matplotlib inline\n\n# Configure Jupyter to display the assigned value after an assignment\n%config InteractiveShell.ast_node_interactivity='last_expr_or_assign'\n\n# import functions from the modsim.py module\nfrom modsim import *", "_____no_output_____" ] ], [ [ "### Code from the previous chapter\n\nRead the data.", "_____no_output_____" ] ], [ [ "data = pd.read_csv('data/glucose_insulin.csv', index_col='time');", "_____no_output_____" ] ], [ [ "Interpolate the insulin data.", "_____no_output_____" ] ], [ [ "I = interpolate(data.insulin)", "_____no_output_____" ] ], [ [ "Initialize the parameters", "_____no_output_____" ] ], [ [ "G0 = 290\nk1 = 0.03\nk2 = 0.02\nk3 = 1e-05", "_____no_output_____" ] ], [ [ "To estimate basal levels, we'll use the concentrations at `t=0`.", "_____no_output_____" ] ], [ [ "Gb = data.glucose[0]\nIb = data.insulin[0]", "_____no_output_____" ] ], [ [ "Create the initial condtions.", "_____no_output_____" ] ], [ [ "init = State(G=G0, X=0)", "_____no_output_____" ] ], [ [ "Make the `System` object.", "_____no_output_____" ] ], [ [ "t_0 = get_first_label(data)\nt_end = get_last_label(data)", "_____no_output_____" ], [ "system = System(init=init, \n k1=k1, k2=k2, k3=k3,\n I=I, Gb=Gb, Ib=Ib,\n t_0=t_0, t_end=t_end, dt=2)", "_____no_output_____" ], [ "def update_func(state, t, system):\n \"\"\"Updates the glucose minimal model.\n \n state: State object\n t: time in min\n system: System object\n \n returns: State object\n \"\"\"\n G, X = state\n unpack(system)\n \n dGdt = -k1 * (G - Gb) - X*G\n dXdt = k3 * (I(t) - Ib) - k2 * X\n \n G += dGdt * dt\n X += dXdt * dt\n\n return State(G=G, X=X)", "_____no_output_____" ], [ "def run_simulation(system, update_func):\n \"\"\"Runs a simulation of the system.\n \n system: System object\n update_func: function that updates state\n \n returns: TimeFrame\n \"\"\"\n unpack(system)\n \n frame = TimeFrame(columns=init.index)\n frame.row[t_0] = init\n ts = linrange(t_0, t_end, dt)\n \n for t in ts:\n frame.row[t+dt] = update_func(frame.row[t], t, system)\n \n return frame", "_____no_output_____" ], [ "%time results = run_simulation(system, update_func);", "_____no_output_____" ] ], [ [ "### Numerical solution\n\nIn the previous chapter, we approximated the differential equations with difference equations, and solved them using `run_simulation`.\n\nIn this chapter, we solve the differential equation numerically using `run_ode_solver`, which is a wrapper for the SciPy ODE solver.\n\nInstead of an update function, we provide a slope function that evaluates the right-hand side of the differential equations. 
We don't have to do the update part; the solver does it for us.", "_____no_output_____" ] ], [ [ "def slope_func(state, t, system):\n \"\"\"Computes derivatives of the glucose minimal model.\n \n state: State object\n t: time in min\n system: System object\n \n returns: derivatives of G and X\n \"\"\"\n G, X = state\n unpack(system)\n \n dGdt = -k1 * (G - Gb) - X*G\n dXdt = k3 * (I(t) - Ib) - k2 * X\n \n return dGdt, dXdt", "_____no_output_____" ] ], [ [ "We can test the slope function with the initial conditions.", "_____no_output_____" ] ], [ [ "slope_func(init, 0, system)", "_____no_output_____" ] ], [ [ "Here's how we run the ODE solver.", "_____no_output_____" ] ], [ [ "%time results2, details = run_ode_solver(system, slope_func, t_eval=data.index);", "_____no_output_____" ] ], [ [ "`details` is a `ModSimSeries` object with information about how the solver worked.", "_____no_output_____" ] ], [ [ "details", "_____no_output_____" ] ], [ [ "`results` is a `TimeFrame` with one row for each time step and one column for each state variable:", "_____no_output_____" ] ], [ [ "results2", "_____no_output_____" ] ], [ [ "Plotting the results from `run_simulation` and `run_ode_solver`, we can see that they are not very different.", "_____no_output_____" ] ], [ [ "plot(results.G, 'g-')\nplot(results2.G, 'b-')\nplot(data.glucose, 'bo')", "_____no_output_____" ] ], [ [ "The differences in `G` are less than 1%.", "_____no_output_____" ] ], [ [ "diff = results.G - results2.G\npercent_diff = diff / results2.G * 100\npercent_diff.dropna()", "_____no_output_____" ] ], [ [ "### Optimization", "_____no_output_____" ], [ "Now let's find the parameters that yield the best fit for the data. ", "_____no_output_____" ], [ "We'll use these values as an initial estimate and iteratively improve them.", "_____no_output_____" ] ], [ [ "params = Params(G0 = 290,\n k1 = 0.03,\n k2 = 0.02,\n k3 = 1e-05)", "_____no_output_____" ] ], [ [ "`make_system` takes the parameters and actual data and returns a `System` object.", "_____no_output_____" ] ], [ [ "def make_system(params, data):\n \"\"\"Makes a System object with the given parameters.\n \n params: sequence of G0, k1, k2, k3\n data: DataFrame with `glucose` and `insulin`\n \n returns: System object\n \"\"\"\n G0, k1, k2, k3 = params\n \n Gb = data.glucose[0]\n Ib = data.insulin[0]\n \n t_0 = get_first_label(data)\n t_end = get_last_label(data)\n\n init = State(G=G0, X=0)\n \n return System(G0=G0, k1=k1, k2=k2, k3=k3,\n init=init, Gb=Gb, Ib=Ib,\n t_0=t_0, t_end=t_end)", "_____no_output_____" ], [ "system = make_system(params, data)", "_____no_output_____" ] ], [ [ "`error_func` takes the parameters and actual data, makes a `System` object, and runs `odeint`, then compares the results to the data. 
It returns an array of errors.", "_____no_output_____" ] ], [ [ "def error_func(params, data):\n \"\"\"Computes an array of errors to be minimized.\n \n params: sequence of parameters\n data: DataFrame of values to be matched\n \n returns: array of errors\n \"\"\"\n print(params)\n \n # make a System with the given parameters\n system = make_system(params, data)\n \n # solve the ODE\n results, details = run_ode_solver(system, slope_func, t_eval=data.index)\n \n # compute the difference between the model\n # results and actual data\n errors = results.G - data.glucose\n return errors", "_____no_output_____" ] ], [ [ "When we call `error_func`, we provide a sequence of parameters as a single object.", "_____no_output_____" ], [ "Here's how that works:", "_____no_output_____" ] ], [ [ "error_func(params, data)", "_____no_output_____" ] ], [ [ "`fit_leastsq` is a wrapper for `scipy.optimize.leastsq`", "_____no_output_____" ], [ "Here's how we call it.", "_____no_output_____" ] ], [ [ "best_params, fit_details = fit_leastsq(error_func, params, data)", "_____no_output_____" ] ], [ [ "The first return value is a `Params` object with the best parameters:", "_____no_output_____" ] ], [ [ "best_params", "_____no_output_____" ] ], [ [ "The second return value is a `ModSimSeries` object with information about the results.", "_____no_output_____" ] ], [ [ "fit_details", "_____no_output_____" ], [ "fit_details", "_____no_output_____" ] ], [ [ "Now that we have `best_params`, we can use it to make a `System` object and run it.", "_____no_output_____" ] ], [ [ "system = make_system(best_params, data)\nresults, details = run_ode_solver(system, slope_func, t_eval=data.index)\ndetails.message", "_____no_output_____" ] ], [ [ "Here are the results, along with the data. The first few points of the model don't fit the data, but we don't expect them to.", "_____no_output_____" ] ], [ [ "plot(results.G, label='simulation')\nplot(data.glucose, 'bo', label='glucose data')\n\ndecorate(xlabel='Time (min)',\n ylabel='Concentration (mg/dL)')\n\nsavefig('figs/chap08-fig04.pdf')", "_____no_output_____" ] ], [ [ "### Interpreting parameters\n\nBased on the parameters of the model, we can estimate glucose effectiveness and insulin sensitivity.", "_____no_output_____" ] ], [ [ "def indices(params):\n \"\"\"Compute glucose effectiveness and insulin sensitivity.\n \n params: sequence of G0, k1, k2, k3\n data: DataFrame with `glucose` and `insulin`\n \n returns: State object containing S_G and S_I\n \"\"\"\n G0, k1, k2, k3 = params\n return State(S_G=k1, S_I=k3/k2)", "_____no_output_____" ] ], [ [ "Here are the results.", "_____no_output_____" ] ], [ [ "indices(best_params)", "_____no_output_____" ] ], [ [ "### Under the hood\n\nHere's the source code for `run_ode_solver` and `fit_leastsq`, if you'd like to know how they work.", "_____no_output_____" ] ], [ [ "%psource run_ode_solver", "_____no_output_____" ], [ "%psource fit_leastsq", "_____no_output_____" ] ], [ [ "## Exercises\n\n**Exercise:** Since we don't expect the first few points to agree, it's probably better not to make them part of the optimization process. We can ignore them by leaving them out of the `Series` returned by `error_func`. Modify the last line of `error_func` to return `errors.loc[8:]`, which includes only the elements of the `Series` from `t=8` and up.\n\nDoes that improve the quality of the fit? 
Does it change the best parameters by much?\n\nNote: You can read more about this use of `loc` [in the Pandas documentation](https://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-integer).", "_____no_output_____" ], [ "**Exercise:** How sensitive are the results to the starting guess for the parameters. If you try different values for the starting guess, do we get the same values for the best parameters?", "_____no_output_____" ], [ "**Related reading:** You might be interested in this article about [people making a DIY artificial pancreas](https://www.bloomberg.com/news/features/2018-08-08/the-250-biohack-that-s-revolutionizing-life-with-diabetes).", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ] ]
cbfd56da5c1aa0b05d43dd6771a6ae29af1e3f2b
17,093
ipynb
Jupyter Notebook
support/GenerateNames.ipynb
Metatab/geoid
7f91dee11f2131381bb57628a55587f5c10f8e6b
[ "BSD-2-Clause" ]
8
2017-10-10T17:25:03.000Z
2021-01-13T22:34:06.000Z
support/GenerateNames.ipynb
Metatab/geoid
7f91dee11f2131381bb57628a55587f5c10f8e6b
[ "BSD-2-Clause" ]
2
2015-09-04T17:20:52.000Z
2015-11-11T17:02:13.000Z
support/GenerateNames.ipynb
Metatab/geoid
7f91dee11f2131381bb57628a55587f5c10f8e6b
[ "BSD-2-Clause" ]
2
2018-08-15T20:52:47.000Z
2020-03-30T08:45:33.000Z
41.690244
155
0.534839
[ [ [ "%load_ext autoreload\n%autoreload 2\nimport ambry\nl = ambry.get_library()\nb = l.bundle('d04w001') # Geoschemas\nsumlevels_p = l.partition('census.gov-acs_geofile-schemas-2009e-sumlevels')", "_____no_output_____" ], [ "sumlevels = {}\nfor row in sumlevels_p.stream(as_dict=True):\n sumlevels[row['sumlevel']] = row['description']", "_____no_output_____" ], [ "from collections import defaultdict, Counter\nfrom geoid import base62_encode\n\ncollector = {}\ngeoids = {}\ndescriptions = {}\n\nfor p in b.partitions:\n #print \"=====\", p.identity.name\n l = {}\n for i, c in enumerate(p.table.columns):\n if i > 5 and c.name not in ('name','geoid', 'memi'):\n l[c.name] = [Counter(), 0]\n descriptions[c.name] = c.description\n \n for i, row in enumerate(p.stream(as_dict=True)):\n if i >= 500:\n break\n \n geoid = row['geoid']\n \n for k in l:\n v = row[k]\n \n \n \n if not str(v).strip():\n continue\n \n \n try:\n # The index is not guarantted to be found in the right position; it could be at the start of the\n # geoid, so we keep track of the most common place it is found\n idx = geoid.index(str(v))\n size = len(str(v))\n \n # Kepp tract of the right end position, not the start, since the end pos is independent of the length\n l[k][0][idx+size] += 1\n l[k][1] = max(l[k][1], size)\n \n except ValueError:\n pass\n \n ordered = []\n for k, v in l.items():\n \n most = v[0].most_common(1)\n \n if most:\n size = v[1]\n start = most[0][0] - size\n\n ordered.append((k, start, size))\n \n ordered = sorted(ordered, key = lambda r: r[1])\n\n #for e in ordered:\n # print \" \", e, len(base62_encode(10**e[2]))\n\n geoids[int(p.grain)] = ordered\n \n for e in ordered:\n collector[e[0]]=(e[2],len(base62_encode(10**e[2])) )\n \n# Print out the lengths array\nout = []\nfor k, v in collector.items():\n out.append('\\'{}\\': {}, # {}'.format(k, v[0], descriptions[k]))\n \nprint '\\n'.join(sorted(out))\n \nfor sl in sorted(geoids):\n ordered = geoids[sl]\n \n print str(sl)+':', str([ str(e[0]) for e in ordered ])+',', \"#\", sumlevels[sl]\n \n \n ", "'aianhh': 4, # American Indian Area/Alaska Native Area/ Hawaiian Home Land (Census)\n'aihhtli': 1, # American Indian Trust Land/ Hawaiian Home Land Indicator\n'aitsce': 3, # American Indian Tribal Subdivision (Census)\n'anrc': 5, # Alaska Native Regional Corporation (FIPS)\n'blkgrp': 1, # Block Group\n'cbsa': 5, # Metropolitan and Micropolitan Statistical Area\n'cdcurr': 2, # Current Congressional District ***\n'cnecta': 3, # New England City and Town Combined Statistical Area\n'concit': 5, # Consolidated City\n'county': 3, # County of current residence\n'cousub': 5, # County Subdivision (FIPS)\n'csa': 3, # Combined Statistical Area\n'division': 1, # Census Division\n'metdiv': 5, # Metropolitan Statistical Area- Metropolitan Division\n'necta': 5, # New England City and Town Area\n'nectadiv': 5, # New England City and Town Area Division\n'place': 5, # Place (FIPS Code)\n'puma5': 4, # Public Use Microdata Area 5% File\n'region': 1, # Census Region\n'sdelm': 5, # State-School District (Elementary)\n'sdsec': 5, # State-School District (Secondary)\n'sduni': 5, # State-School District (Unified)\n'sldl': 3, # State Legislative District Lower\n'sldu': 2, # State Legislative District Upper\n'state': 2, # State (FIPS Code)\n'submcd': 5, # Subminor Civil Division (FIPS)\n'tract': 4, # Census Tract\n'ua': 5, # Urban Area\n'ur': 1, # Urban/Rural\n'us': 1, # US\n10: ['us', 'ur'], # United States\n20: ['ur', 'region'], # Region\n30: ['ur', 'division'], # Division\n40: ['ur', 
'state'], # State\n50: ['state', 'county'], # County\n60: ['state', 'county', 'cousub'], # County Subdivision\n67: ['state', 'county', 'cousub', 'submcd'], # State (Puerto Rico Only)-County-County Subdivision-Subbarrio\n70: ['state', 'county', 'cousub', 'place'], # County Subdivision-Place/Remainder\n80: ['state', 'county', 'cousub', 'place', 'tract'], # County Subdivision-Place/Remainder-Census Tract\n140: ['state', 'county', 'tract'], # Census Tract\n150: ['state', 'county', 'tract', 'blkgrp'], # Census Tract-Block Group\n155: ['state', 'place', 'county'], # Place-County\n160: ['state', 'place'], # Place\n170: ['state', 'concit'], # Consolidated City\n172: ['state', 'concit', 'place'], # Consolidated City-Place Within Consolidated City\n230: ['state', 'anrc'], # State-Alaska Native Regional Corporation\n250: ['aianhh'], # American Indian Area/Alaska Native Area/Hawaiian Home Land\n251: ['aianhh', 'aitsce'], # American Indian Area/Alaska NativeArea/HawaiianHomeLand-Tribal Subdivision/Remainder\n252: ['aianhh', 'aihhtli'], # American Indian Area/Alaska Native Area (Reservation or Statistical Entity Only)4\n254: ['aianhh', 'aihhtli'], # American Indian Area (Off-Reservation Trust Land Only)/Hawaiian Home Land\n260: ['state', 'aianhh'], # American Indian Area/Alaska Native Area/Hawaiian Home Land-State\n269: ['state', 'aianhh', 'place'], # American Indian Area/Alaska Native Area/Hawaiian Home Land-Place-Remainder\n270: ['aianhh', 'state', 'county'], # American Indian Area/Alaska Native Area/Hawaiian Home Land-State-County\n280: ['state', 'aianhh'], # State-American Indian Area/Alaska Native Area/Hawaiian Home Land\n283: ['state', 'aianhh', 'aihhtli'], # State-American Indian Area/Alaska Native Area (Reservation or Statistical Entity Only)\n286: ['state', 'aianhh', 'aihhtli'], # State-American Indian Area (Off-Reservation Trust Land Only)/Hawaiian Home Land\n290: ['aianhh', 'aitsce', 'state'], # American Indian Area/Alaska Native Area/Hawaiian Home Land-Tribal Subdivision/Remainder-State\n310: ['cbsa'], # CBSA\n311: ['cbsa', 'state'], # CBSA-State-County\n312: ['cbsa', 'state', 'place'], # CBSA-State-Principal City\n313: ['cbsa', 'state', 'county'], # CBSA-State-County\n314: ['cbsa', 'metdiv'], # Metropolitan Statistical Area/Metropolitan Division\n315: ['cbsa', 'metdiv', 'state'], # Metropolitan Statistical Area/Metropolitan Division-State\n316: ['cbsa', 'metdiv', 'state', 'county'], # Metropolitan Statistical Area/Metropolitan Division-State-County\n320: ['state', 'cbsa'], # State- CBSA\n321: ['state', 'cbsa', 'place'], # State- CBSA -Principal City\n322: ['state', 'cbsa', 'county'], # State- CBSA -County\n323: ['state', 'cbsa', 'metdiv'], # State- Metropolitan Statistical Area/Metropolitan Division\n324: ['state', 'cbsa', 'metdiv', 'county'], # State- Metropolitan Statistical Area/Metropolitan Division-County\n330: ['csa'], # Combined Statistical Area\n331: ['csa', 'state'], # Combined Statistical Area-State\n332: ['csa', 'cbsa'], # Combined Statistical Area-CBSA\n333: ['csa', 'cbsa', 'state'], # Combined Statistical Area-CBSA-State\n335: ['cnecta'], # Combined New England City and Town Area\n336: ['cnecta', 'state'], # Combined New England City and Town Area -State\n337: ['cnecta', 'necta'], # Combined New England City and Town Area -New England City and Town Area\n338: ['cnecta', 'necta', 'state'], # Combined New England City and Town Area -New England City and Town Area-State\n340: ['state', 'csa'], # State-Combined Statistical Area\n341: ['state', 'csa', 'cbsa'], # State-Combined 
Statistical Area-CBSA\n345: ['state', 'cnecta'], # State-Combined New England City and Town Area\n346: ['state', 'cnecta', 'necta'], # State-Combined New England City and Town Area-New England City and Town Area\n350: ['necta'], # New England City and Town Area\n351: ['necta', 'state'], # New England City and Town Area-State\n352: ['necta', 'state', 'place'], # New England City and Town Area-State-Principal City\n353: ['necta', 'state', 'county'], # New England City and Town Area-State-County\n354: ['necta', 'state', 'county', 'cousub'], # New England City and Town Area-State-County-County Subdivision\n355: ['necta', 'nectadiv'], # New England City and Town Area (NECTA)-NECTA Division\n356: ['necta', 'nectadiv', 'state'], # New England City and Town Area (NECTA)-NECTA Division-State\n357: ['necta', 'nectadiv', 'state', 'county'], # New England City and Town Area (NECTA)-NECTA Division-State-County\n358: ['necta', 'nectadiv', 'state', 'county', 'cousub'], # New England City and Town Area (NECTA)-NECTA Division-State-County-County Subdivision\n360: ['state', 'necta'], # State-New England City and Town Area\n361: ['state', 'necta', 'place'], # State-New England City and Town Area-Principal City\n362: ['state', 'necta', 'county'], # State-New England City and Town Area-County\n363: ['state', 'necta', 'county', 'cousub'], # State-New England City and Town Area-County-County Subdivision\n364: ['state', 'necta', 'nectadiv'], # State-New England City and Town Area (NECTA)-NECTA Division\n365: ['state', 'necta', 'nectadiv', 'county'], # State-New England City and Town Area (NECTA)-NECTA Division-County\n366: ['state', 'necta', 'nectadiv', 'county', 'cousub'], # State-New England City and Town Area (NECTA)-NECTA Division-County-County Subdivision\n400: ['ua'], # Urban Area\n500: ['state', 'cdcurr'], # Congressional District\n510: ['state', 'cdcurr', 'county'], # \n550: ['state', 'cdcurr', 'aianhh'], # Congressional District-American IndianArea/Alaska NativeArea/Hawaiian Home Land\n610: ['state', 'sldu'], # State Senate District\n612: ['state', 'sldu', 'county'], # State Senate District-County\n620: ['state', 'sldl'], # State House District\n622: ['state', 'sldl', 'county'], # State House District-County\n795: ['state', 'puma5'], # State-Public Use MicroSample Area 5%\n950: ['state', 'sdelm'], # State-Elementary School District\n960: ['state', 'sdsec'], # State-High School District\n970: ['state', 'sduni'], # State-Unified School District\n" ], [ "from geoid import names, segments\nnames_map = {v:k for k, v in names.items()}\n\nseen = set()\n\nfor k, v in segments.items():\n if k in names_map:\n pass\n else:\n \n name = '_'.join( e for e in v)\n name = name[0].lower() + name[1:]\n \n if name in seen:\n name += str(k)\n \n seen.add(name)\n \n print \"'{}': {},\".format(name, k)", "'state_aianhh': 260,\n'necta_nectadiv_state_county_cousub': 358,\n'state_sldl': 620,\n'state_aianhh_place': 269,\n'aianhh_state_county': 270,\n'state_cbsa_metdiv': 323,\n'state_sldu': 610,\n'state_aianhh280': 280,\n'state_place_county': 155,\n'aianhh_aitsce_state': 290,\n'state_aianhh_aihhtli': 283,\n'state_cdcurr_aianhh': 550,\n'state_concit': 170,\n'state_concit_place': 172,\n'state_aianhh_aihhtli286': 286,\n'cbsa': 310,\n'cbsa_state': 311,\n'cbsa_state_place': 312,\n'cbsa_state_county': 313,\n'cbsa_metdiv': 314,\n'cbsa_metdiv_state': 315,\n'state_cbsa': 320,\n'state_cbsa_place': 321,\n'state_cbsa_county': 322,\n'state_county_cousub_submcd': 67,\n'state_cbsa_metdiv_county': 324,\n'state_county_cousub_place': 
70,\n'necta_state_county': 353,\n'state_puma5': 795,\n'csa': 330,\n'csa_state': 331,\n'csa_cbsa': 332,\n'csa_cbsa_state': 333,\n'cnecta': 335,\n'state_county_cousub_place_tract': 80,\n'cnecta_necta': 337,\n'cnecta_necta_state': 338,\n'state_csa': 340,\n'state_csa_cbsa': 341,\n'state_cnecta': 345,\n'state_cnecta_necta': 346,\n'necta': 350,\n'necta_state': 351,\n'necta_state_place': 352,\n'cnecta_state': 336,\n'necta_state_county_cousub': 354,\n'necta_nectadiv': 355,\n'necta_nectadiv_state': 356,\n'state_anrc': 230,\n'necta_nectadiv_state_county': 357,\n'state_necta': 360,\n'cbsa_metdiv_state_county': 316,\n'state_necta_county': 362,\n'state_necta_county_cousub': 363,\n'state_necta_nectadiv': 364,\n'state_necta_nectadiv_county': 365,\n'state_necta_nectadiv_county_cousub': 366,\n'state_sldu_county': 612,\n'state_cdcurr': 500,\n'state_cdcurr_county': 510,\n'state_necta_place': 361,\n'aianhh': 250,\n'aianhh_aitsce': 251,\n'aianhh_aihhtli': 252,\n'state_sldl_county': 622,\n'aianhh_aihhtli254': 254,\n" ], [ "%load_ext autoreload\n%autoreload 2\nfrom geoid.acs import AcsGeoid\n\nfor p in b.partitions:\n \n for i, row in enumerate(p.stream(as_dict=True)):\n if i >= 500:\n break\n \n geoid = row['geoid']\n \n try:\n AcsGeoid.parse(geoid)\n \n except Exception as e:\n print geoid, e\n raise\n ", "The autoreload extension is already loaded. To reload it, use:\n %reload_ext autoreload\n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code" ] ]
cbfd58355cef96ee55df0fff8a1396689422ac69
158,754
ipynb
Jupyter Notebook
parametricSN/notebooks/Deformation.ipynb
sgaut023/kymatio_mod
2323cc3f0ed4aab5d73566216f1139b53976d78f
[ "BSD-3-Clause" ]
3
2021-07-13T00:00:47.000Z
2022-02-15T01:10:26.000Z
parametricSN/notebooks/Deformation.ipynb
sgaut023/kymatio_mod
2323cc3f0ed4aab5d73566216f1139b53976d78f
[ "BSD-3-Clause" ]
null
null
null
parametricSN/notebooks/Deformation.ipynb
sgaut023/kymatio_mod
2323cc3f0ed4aab5d73566216f1139b53976d78f
[ "BSD-3-Clause" ]
2
2021-09-02T14:03:30.000Z
2021-11-10T10:31:42.000Z
265.03172
11,864
0.915271
[ [ [ "import sys\nfrom pathlib import Path \nsys.path.append(str(Path.cwd().parent.parent))\nimport numpy as np\nfrom kymatio.scattering2d.core.scattering2d import scattering2d\nimport matplotlib.pyplot as plt\nimport torch\nimport torchvision\nfrom kymatio import Scattering2D\nfrom PIL import Image\nfrom IPython.display import display \nfrom torchvision.transforms import *", "_____no_output_____" ], [ "#img = Image.open('/NOBACKUP/gauthiers/KTH/sample_a/wood/54a-scale_10_im_10_col.png')\nimg = Image.open('/NOBACKUP/gauthiers/chest_xrays_preprocess/train/positive/MIDRC-RICORD-1C-SITE2-000216-21074-0.png')\nrsz_transf = torchvision.transforms.Resize((128,128))\nimg = rsz_transf(img)\ndisplay(img)", "_____no_output_____" ] ], [ [ "Rotation", "_____no_output_____" ] ], [ [ "transformation = torchvision.transforms.RandomRotation(degrees = 45)\ntransformation.degrees = [45,45]\nimg_rot2 = transformation(img)\ndisplay(img_rot2)", "_____no_output_____" ] ], [ [ "Blur", "_____no_output_____" ] ], [ [ "transformation = torchvision.transforms.GaussianBlur(3)\nimg_blur = transformation(img)\ndisplay(img_blur)", "_____no_output_____" ] ], [ [ "Perspective", "_____no_output_____" ] ], [ [ "transformation = torchvision.transforms.RandomPerspective()\nimg_rdmPersp = transformation(img)\ndisplay(img_rdmPersp)", "/home/alseneracil/.conda/envs/parametricSN/lib/python3.7/site-packages/torchvision/transforms/functional.py:594: UserWarning: torch.lstsq is deprecated in favor of torch.linalg.lstsq and will be removed in a future PyTorch release.\ntorch.linalg.lstsq has reversed arguments and does not return the QR decomposition in the returned tuple (although it returns other information about the problem).\nTo get the qr decomposition consider using torch.linalg.qr.\nThe returned solution in torch.lstsq stored the residuals of the solution in the last m - n columns of the returned value whenever m > n. In torch.linalg.lstsq, the residuals in the field 'residuals' of the returned named tuple.\nThe unpacking of the solution, as in\nX, _ = torch.lstsq(B, A).solution[:A.size(1)]\nshould be replaced with\nX = torch.linalg.lstsq(A, B).solution (Triggered internally at /opt/conda/conda-bld/pytorch_1623448224956/work/aten/src/ATen/LegacyTHFunctionsCPU.cpp:389.)\n res = torch.lstsq(b_matrix, a_matrix)[0]\n" ], [ "transforms = torchvision.transforms.RandomPerspective(distortion_scale=0.5,p=1)\ntransforms.distortion_scale = 0.9\nimg_1 = transforms(img)\ndisplay(img_1)\n", "_____no_output_____" ], [ "transforms = torchvision.transforms.RandomAffine(degrees = 0, shear=90)\nimg_2 = transforms(img)\ndisplay(img_2)\n", "_____no_output_____" ] ], [ [ "À la Mallat", "_____no_output_____" ] ], [ [ "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\ndevice = torch.device('cpu')\nimport time\nt0 = time.time()\n# Function \\tau in Mallat's. Deform the index u. The function is chosen arbitrary as an example.\ntau = lambda u : (0.5*u[0]+0.3*u[1]**2,0.3*u[1])\n# Deform the index u for all u of the image.\ntau_mat = lambda grid : torch.tensor([[tau(grid[i,j,:]) for j in range(len(grid))] for i in range(len(grid))],device = device)\ntauV = lambda u : torch.stack([0.5*u[:,0]+0.3*u[:,1]**2,0.3*u[:,1]]).T\n\n\n# Deforms the image given a function \\tau.\ndef diffeo(img,tau):\n # Image to tensor\n transf = torchvision.transforms.ToTensor()\n img = transf(img).unsqueeze(0).to(device)\n # Number of pixels. Suppose square image.\n dim = img.shape[-1]\n # Create a (dim x dim) matrix of 2d vectors. 
Each vector represents the normalized position in the grid. \n # Normalized means (-1,-1) is top left and (1,1) is bottom right.\n grid = torch.tensor([[[x,y] for x in torch.linspace(-1,1,dim,device = device)] for y in torch.linspace(-1,1,dim,device = device)],device = device)\n # Apply u-tau(u) in Mallat's. \n grid_transf = (grid - tau_mat(grid)).unsqueeze(0)\n # Apply x(u-tau(u)) by interpolating the image at the index points given by grid_transf.\n img_transf = torch.nn.functional.grid_sample(img,grid_transf).squeeze(0)\n # Tensor to image\n transf = torchvision.transforms.ToPILImage()\n return transf(img_transf)\n\n# Calculate the deformation size : sup |J_{tau}(u)| over u.\ndef deformation_size(tau):\n # Set a precision. This is arbitrary.\n precision = 128\n # Create a (flatten) grid of points between (-1,-1) and (1,1). This is the same grid as in the previous\n # function (but flatten), but it feels arbitrary also.\n points = [torch.tensor([x,y],device = device) for x in torch.linspace(-1,1,precision,device = device) for y in torch.linspace(-1,1,precision,device = device)]\n # Evaluate the Jacobian of tau in each of those points. Returns a tensor of precision^2 x 2 x 2, i.e.\n # for each point in points the 2 x 2 jacobian. Is it necessary to compute on all points, or only on the\n # boundary would be sufficient?\n t1 = time.time()\n jac = torch.stack(list(map(lambda point : torch.stack(torch.autograd.functional.jacobian(tau,point)), points)))\n print(\"grad calc +\", (time.time()-t1))\n # Find the norm of those jacobians.\n norm_jac = torch.linalg.matrix_norm(jac,ord=2,dim=(1, 2))\n # Return the Jacobian with the biggest norm.\n return torch.max(norm_jac)\n\nimg_diffeo = diffeo(img,tau)\ndisplay(img_diffeo)\ndeformation_size(tau)\nprint(\"full notebook +\", (time.time()-t0))", "_____no_output_____" ], [ "tau(torch.randn((64,2)))", "_____no_output_____" ], [ "points = [torch.tensor([0.,0.]),torch.tensor([1.,2.])]\njac = torch.autograd.functional.jacobian(tau,points[0])\njac2 = torch.stack(jac)\njac = torch.autograd.functional.jacobian(tau,points[1])\njac3 = torch.stack(jac)\nn = 0\njac4 = torch.cat([jac2.unsqueeze(n),jac3.unsqueeze(n)],dim = n)\nprint(jac2)\nprint(jac3)\nprint(jac4)\nprint(jac4.shape)\n\njac5 = torch.cat([torch.stack(torch.autograd.functional.jacobian(tau,point)).unsqueeze(0) for point in points], dim = 0)\nprint(jac5)", "tensor([[0.5000, 0.0000],\n [0.0000, 0.3000]])\ntensor([[0.5000, 1.2000],\n [0.0000, 0.3000]])\ntensor([[[0.5000, 0.0000],\n [0.0000, 0.3000]],\n\n [[0.5000, 1.2000],\n [0.0000, 0.3000]]])\ntorch.Size([2, 2, 2])\ntensor([[[0.5000, 0.0000],\n [0.0000, 0.3000]],\n\n [[0.5000, 1.2000],\n [0.0000, 0.3000]]])\n" ], [ "points = [torch.tensor([0.,0.]),torch.tensor([1.,2.])]\njac = torch.stack(list(map(lambda point : torch.stack(torch.autograd.functional.jacobian(tau,point)), points)))\nprint(jac)\nprint(jac.shape)", "tensor([[[0.5000, 0.0000],\n [0.0000, 0.3000]],\n\n [[0.5000, 1.2000],\n [0.0000, 0.3000]]])\ntorch.Size([2, 2, 2])\n" ], [ "points = [torch.tensor([0.,0.]),torch.tensor([1.,2.])]\njac = torch.cat([torch.cat([x.unsqueeze(1) for x in torch.autograd.functional.jacobian(tau,point)],dim =1).unsqueeze(2) for point in points],dim = 2)\nprint(jac)\nprint(jac.shape)", "tensor([[[0.5000, 0.5000],\n [0.0000, 0.0000]],\n\n [[0.0000, 1.2000],\n [0.3000, 0.3000]]])\ntorch.Size([2, 2, 2])\n" ], [ "eps = 0.3\ntau = lambda u : (eps*u[0],eps*u[1])\ndisplay(diffeo(img,tau))", "_____no_output_____" ], [ "eps = 0.3\ntau = lambda u : 
(eps*u[1],eps*u[0])\ndisplay(diffeo(img,tau))", "_____no_output_____" ], [ "eps = 0.3\ntau = lambda u : (eps*(u[0]+u[1]),eps*(u[0]+u[1]))\ndisplay(diffeo(img,tau))", "_____no_output_____" ], [ "eps = 0.3\ntau = lambda u : (eps*(u[0]+u[1]),eps*(u[0]-u[1]))\ndisplay(diffeo(img,tau))", "_____no_output_____" ], [ "eps = 0.3\ntau = lambda u : (eps*(u[0]**2+u[1]**2),eps*(2*u[0]*u[1]))\ndisplay(diffeo(img,tau))", "_____no_output_____" ], [ "eps = 0.3\ntau = lambda u : (eps*(u[0]**2+u[1]**2),-eps*(2*u[0]*u[1]))\ndisplay(diffeo(img,tau))", "_____no_output_____" ], [ "eps = 0.3\ntau = lambda u : (torch.exp(eps*u[0])-1,torch.exp(eps*u[1])-1)\ndisplay(diffeo(img,tau))", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbfd5d2f5aa95d23177cee5c2cf0da4f5c62ffa0
142,747
ipynb
Jupyter Notebook
docs_src/basic_train.ipynb
jtrofe/fastai
fae3e4a9d25408912f8911629aa60eec0a68c91b
[ "Apache-2.0" ]
null
null
null
docs_src/basic_train.ipynb
jtrofe/fastai
fae3e4a9d25408912f8911629aa60eec0a68c91b
[ "Apache-2.0" ]
null
null
null
docs_src/basic_train.ipynb
jtrofe/fastai
fae3e4a9d25408912f8911629aa60eec0a68c91b
[ "Apache-2.0" ]
null
null
null
66.363087
29,536
0.774048
[ [ [ "# Basic training functionality", "_____no_output_____" ] ], [ [ "from fastai.basic_train import *\nfrom fastai.gen_doc.nbdoc import *\nfrom fastai.vision import *\nfrom fastai.distributed import *", "_____no_output_____" ] ], [ [ "[`basic_train`](/basic_train.html#basic_train) wraps together the data (in a [`DataBunch`](/basic_data.html#DataBunch) object) with a pytorch model to define a [`Learner`](/basic_train.html#Learner) object. This is where the basic training loop is defined for the [`fit`](/basic_train.html#fit) function. The [`Learner`](/basic_train.html#Learner) object is the entry point of most of the [`Callback`](/callback.html#Callback) functions that will customize this training loop in different ways (and made available through the [`train`](/train.html#train) module), notably:\n\n - [`Learner.lr_find`](/train.html#lr_find) will launch an LR range test that will help you select a good learning rate\n - [`Learner.fit_one_cycle`](/train.html#fit_one_cycle) will launch a training using the 1cycle policy, to help you train your model fast.\n - [`Learner.to_fp16`](/train.html#to_fp16) will convert your model in half precision and help you launch a training in mixed precision.", "_____no_output_____" ] ], [ [ "show_doc(Learner, title_level=2)", "_____no_output_____" ] ], [ [ "The main purpose of [`Learner`](/basic_train.html#Learner) is to train `model` using [`Learner.fit`](/basic_train.html#Learner.fit). After every epoch, all *metrics* will be printed, and will also be available to callbacks.\n\nThe default weight decay will be `wd`, which will be handled using the method from [Fixing Weight Decay Regularization in Adam](https://arxiv.org/abs/1711.05101) if `true_wd` is set (otherwise it's L2 regularization). If `bn_wd` is False then weight decay will be removed from batchnorm layers, as recommended in [Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour](https://arxiv.org/abs/1706.02677). You can ensure that batchnorm layer learnable params are trained even for frozen layer groups, by enabling `train_bn`.\n\nTo use [discriminative layer training](#Discriminative-layer-training) pass an [`nn.Module`](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) for each layer group to be optimized with different settings.\n\nAny model files created will be saved in `path`/`model_dir`.\n\nYou can pass a list of [`callbacks`](/callbacks.html#callbacks) that you have already created, or (more commonly) simply pass a list of callback functions to `callback_fns` and each function will be called (passing `self`) on object initialization, with the results stored as callback objects. For a walk-through, see the [training overview](/training.html) page. You may also want to use an `application` to fit your model, e.g. using the [`create_cnn`](/vision.learner.html#create_cnn) method:", "_____no_output_____" ] ], [ [ "path = untar_data(URLs.MNIST_SAMPLE)\ndata = ImageDataBunch.from_folder(path)\nlearn = create_cnn(data, models.resnet18, metrics=accuracy)\nlearn.fit(1)", "_____no_output_____" ] ], [ [ "### Model fitting methods", "_____no_output_____" ] ], [ [ "show_doc(Learner.fit)", "_____no_output_____" ] ], [ [ "Uses [discriminative layer training](#Discriminative-layer-training) if multiple learning rates or weight decay values are passed. 
To control training behaviour, use the [`callback`](/callback.html#callback) system or one or more of the pre-defined [`callbacks`](/callbacks.html#callbacks).", "_____no_output_____" ] ], [ [ "show_doc(Learner.fit_one_cycle)", "_____no_output_____" ] ], [ [ "Uses the [`OneCycleScheduler`](/callbacks.one_cycle.html#OneCycleScheduler) callback.", "_____no_output_____" ] ], [ [ "show_doc(Learner.lr_find)", "_____no_output_____" ] ], [ [ "Runs the learning rate finder defined in [`LRFinder`](/callbacks.lr_finder.html#LRFinder), as discussed in [Cyclical Learning Rates for Training Neural Networks](https://arxiv.org/abs/1506.01186). ", "_____no_output_____" ], [ "### See results", "_____no_output_____" ] ], [ [ "show_doc(Learner.get_preds)", "_____no_output_____" ], [ "show_doc(Learner.validate)", "_____no_output_____" ], [ "show_doc(Learner.show_results)", "_____no_output_____" ], [ "show_doc(Learner.predict)", "_____no_output_____" ], [ "show_doc(Learner.pred_batch)", "_____no_output_____" ], [ "show_doc(Learner.interpret, full_name='interpret')", "_____no_output_____" ], [ "jekyll_note('This function only works in the vision application.')", "_____no_output_____" ] ], [ [ "### Model summary", "_____no_output_____" ] ], [ [ "show_doc(Learner.summary)", "_____no_output_____" ] ], [ [ "### Test time augmentation", "_____no_output_____" ] ], [ [ "show_doc(Learner.TTA, full_name = 'TTA')", "_____no_output_____" ] ], [ [ "Applies Test Time Augmentation to `learn` on the dataset `ds_type`. We take the average of our regular predictions (with a weight `beta`) with the average of predictions obtained through augmented versions of the training set (with a weight `1-beta`). The transforms decided for the training set are applied with a few changes `scale` controls the scale for zoom (which isn't random), the cropping isn't random but we make sure to get the four corners of the image. Flipping isn't random but applied once on each of those corner images (so that makes 8 augmented versions total).", "_____no_output_____" ], [ "### Gradient clipping", "_____no_output_____" ] ], [ [ "show_doc(Learner.clip_grad)", "_____no_output_____" ] ], [ [ "### Mixed precision training", "_____no_output_____" ] ], [ [ "show_doc(Learner.to_fp16)", "_____no_output_____" ] ], [ [ "Uses the [`MixedPrecision`](/callbacks.fp16.html#MixedPrecision) callback to train in mixed precision (i.e. forward and backward passes using fp16, with weight updates using fp32), using all [NVIDIA recommendations](https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html) for ensuring speed and accuracy.", "_____no_output_____" ] ], [ [ "show_doc(Learner.to_fp32)", "_____no_output_____" ] ], [ [ "### Distributed training", "_____no_output_____" ] ], [ [ "show_doc(Learner.distributed, full_name='distributed')", "_____no_output_____" ] ], [ [ "### Discriminative layer training", "_____no_output_____" ], [ "When fitting a model you can pass a list of learning rates (and/or weight decay amounts), which will apply a different rate to each *layer group* (i.e. the parameters of each module in `self.layer_groups`). See the [Universal Language Model Fine-tuning for Text Classification](https://arxiv.org/abs/1801.06146) paper for details and experimental results in NLP (we also frequently use them successfully in computer vision, but have not published a paper on this topic yet). When working with a [`Learner`](/basic_train.html#Learner) on which you've called `split`, you can set hyperparameters in four ways:\n\n1. 
`param = [val1, val2 ..., valn]` (n = number of layer groups)\n2. `param = val`\n3. `param = slice(start,end)`\n4. `param = slice(end)`\n\nIf we chose to set it in way 1, we must specify a number of values exactly equal to the number of layer groups. If we chose to set it in way 2, the chosen value will be repeated for all layer groups. See [`Learner.lr_range`](/basic_train.html#Learner.lr_range) for an explanation of the `slice` syntax).\n\nHere's an example of how to use discriminative learning rates (note that you don't actually need to manually call [`Learner.split`](/basic_train.html#Learner.split) in this case, since fastai uses this exact function as the default split for `resnet18`; this is just to show how to customize it):", "_____no_output_____" ] ], [ [ "# creates 3 layer groups\nlearn.split(lambda m: (m[0][6], m[1]))\n# only randomly initialized head now trainable\nlearn.freeze()", "_____no_output_____" ], [ "learn.fit_one_cycle(1)", "_____no_output_____" ], [ "# all layers now trainable\nlearn.unfreeze()\n# optionally, separate LR and WD for each group\nlearn.fit_one_cycle(1, max_lr=(1e-4, 1e-3, 1e-2), wd=(1e-4,1e-4,1e-1))", "_____no_output_____" ], [ "show_doc(Learner.lr_range)", "_____no_output_____" ] ], [ [ "Rather than manually setting an LR for every group, it's often easier to use [`Learner.lr_range`](/basic_train.html#Learner.lr_range). This is a convenience method that returns one learning rate for each layer group. If you pass `slice(start,end)` then the first group's learning rate is `start`, the last is `end`, and the remaining are evenly geometrically spaced.\n\nIf you pass just `slice(end)` then the last group's learning rate is `end`, and all the other groups are `end/10`. For instance (for our learner that has 3 layer groups):", "_____no_output_____" ] ], [ [ "learn.lr_range(slice(1e-5,1e-3)), learn.lr_range(slice(1e-3))", "_____no_output_____" ], [ "show_doc(Learner.unfreeze)", "_____no_output_____" ] ], [ [ "Sets every layer group to *trainable* (i.e. `requires_grad=True`).", "_____no_output_____" ] ], [ [ "show_doc(Learner.freeze)", "_____no_output_____" ] ], [ [ "Sets every layer group except the last to *untrainable* (i.e. `requires_grad=False`).", "_____no_output_____" ] ], [ [ "show_doc(Learner.freeze_to)", "_____no_output_____" ], [ "show_doc(Learner.split)", "_____no_output_____" ] ], [ [ "A convenience method that sets `layer_groups` based on the result of [`split_model`](/torch_core.html#split_model). If `split_on` is a function, it calls that function and passes the result to [`split_model`](/torch_core.html#split_model) (see above for example).", "_____no_output_____" ], [ "### Saving and loading models", "_____no_output_____" ], [ "Simply call [`Learner.save`](/basic_train.html#Learner.save) and [`Learner.load`](/basic_train.html#Learner.load) to save and load models. Only the parameters are saved, not the actual architecture (so you'll need to create your model in the same way before loading weights back in). 
Models are saved to the `path`/`model_dir` directory.", "_____no_output_____" ] ], [ [ "show_doc(Learner.load)", "_____no_output_____" ], [ "show_doc(Learner.save)", "_____no_output_____" ] ], [ [ "### Deploying your model", "_____no_output_____" ], [ "When you are ready to put your model in production, export the minimal state of your [`Learner`](/basic_train.html#Learner) with", "_____no_output_____" ] ], [ [ "show_doc(Learner.export)", "_____no_output_____" ] ], [ [ "Then you can load it with the following function.", "_____no_output_____" ] ], [ [ "show_doc(load_learner)", "_____no_output_____" ] ], [ [ "You can find more information and multiple examples in [this tutorial](/tutorial.inference.html)", "_____no_output_____" ], [ "### Other methods", "_____no_output_____" ] ], [ [ "show_doc(Learner.init)", "_____no_output_____" ] ], [ [ "Initializes all weights (except batchnorm) using function `init`, which will often be from PyTorch's [`nn.init`](https://pytorch.org/docs/stable/nn.html#torch-nn-init) module.", "_____no_output_____" ] ], [ [ "show_doc(Learner.mixup)", "_____no_output_____" ] ], [ [ "Uses [`MixUpCallback`](/callbacks.mixup.html#MixUpCallback).", "_____no_output_____" ] ], [ [ "show_doc(Learner.backward)", "_____no_output_____" ], [ "show_doc(Learner.create_opt)", "_____no_output_____" ] ], [ [ "You generally won't need to call this yourself - it's used to create the [`optim`](https://pytorch.org/docs/stable/optim.html#module-torch.optim) optimizer before fitting the model.", "_____no_output_____" ] ], [ [ "show_doc(Learner.dl)", "_____no_output_____" ], [ "show_doc(Recorder, title_level=2)", "_____no_output_____" ] ], [ [ "A [`Learner`](/basic_train.html#Learner) creates a [`Recorder`](/basic_train.html#Recorder) object automatically - you do not need to explicitly pass it to `callback_fns` - because other callbacks rely on it being available. It stores the smoothed loss, hyperparameter values, and metrics for each batch, and provides plotting methods for each. 
Note that [`Learner`](/basic_train.html#Learner) automatically sets an attribute with the snake-cased name of each callback, so you can access this through `Learner.recorder`, as shown below.", "_____no_output_____" ], [ "### Plotting methods", "_____no_output_____" ] ], [ [ "show_doc(Recorder.plot)", "_____no_output_____" ] ], [ [ "This is mainly used with the learning rate finder, since it shows a scatterplot of loss vs learning rate.", "_____no_output_____" ] ], [ [ "learn = create_cnn(data, models.resnet18, metrics=accuracy)\nlearn.lr_find()\nlearn.recorder.plot()", "LR Finder is complete, type {learner_name}.recorder.plot() to see the graph.\n" ], [ "show_doc(Recorder.plot_losses)", "_____no_output_____" ] ], [ [ "Note that validation losses are only calculated once per epoch, whereas training losses are calculated after every batch.", "_____no_output_____" ] ], [ [ "learn.fit_one_cycle(2)\nlearn.recorder.plot_losses()", "_____no_output_____" ], [ "show_doc(Recorder.plot_lr)", "_____no_output_____" ], [ "learn.recorder.plot_lr(show_moms=True)", "_____no_output_____" ], [ "show_doc(Recorder.plot_metrics)", "_____no_output_____" ] ], [ [ "Note that metrics are only collected at the end of each epoch, so you'll need to train at least two epochs to have anything to show here.", "_____no_output_____" ] ], [ [ "learn.recorder.plot_metrics()", "_____no_output_____" ] ], [ [ "### Callback methods", "_____no_output_____" ], [ "You don't call these yourself - they're called by fastai's [`Callback`](/callback.html#Callback) system automatically to enable the class's functionality.", "_____no_output_____" ] ], [ [ "show_doc(Recorder.on_backward_begin)", "_____no_output_____" ], [ "show_doc(Recorder.on_batch_begin)", "_____no_output_____" ], [ "show_doc(Recorder.on_epoch_end)", "_____no_output_____" ], [ "show_doc(Recorder.on_train_begin)", "_____no_output_____" ] ], [ [ "### Inner functions", "_____no_output_____" ], [ "The following functions are used along the way by the [`Recorder`](/basic_train.html#Recorder) or can be called by other callbacks.", "_____no_output_____" ] ], [ [ "show_doc(Recorder.add_metrics)", "_____no_output_____" ], [ "show_doc(Recorder.add_metric_names)", "_____no_output_____" ], [ "show_doc(Recorder.format_stats)", "_____no_output_____" ] ], [ [ "## Module functions", "_____no_output_____" ], [ "Generally you'll want to use a [`Learner`](/basic_train.html#Learner) to train your model, since they provide a lot of functionality and make things easier. However, for ultimate flexibility, you can call the same underlying functions that [`Learner`](/basic_train.html#Learner) calls behind the scenes:", "_____no_output_____" ] ], [ [ "show_doc(fit)", "_____no_output_____" ] ], [ [ "Note that you have to create the `Optimizer` yourself if you call this function, whereas [`Learn.fit`](/basic_train.html#fit) creates it for you automatically.", "_____no_output_____" ] ], [ [ "show_doc(train_epoch)", "_____no_output_____" ] ], [ [ "You won't generally need to call this yourself - it's what [`fit`](/basic_train.html#fit) calls for each epoch.", "_____no_output_____" ] ], [ [ "show_doc(validate)", "_____no_output_____" ] ], [ [ "This is what [`fit`](/basic_train.html#fit) calls after each epoch. 
You can call it if you want to run inference on a [`DataLoader`](https://pytorch.org/docs/stable/data.html#torch.utils.data.DataLoader) manually.", "_____no_output_____" ] ], [ [ "show_doc(get_preds)", "_____no_output_____" ], [ "show_doc(loss_batch)", "_____no_output_____" ] ], [ [ "You won't generally need to call this yourself - it's what [`fit`](/basic_train.html#fit) and [`validate`](/basic_train.html#validate) call for each batch. It only does a backward pass if you set `opt`.", "_____no_output_____" ], [ "## Other classes", "_____no_output_____" ] ], [ [ "show_doc(LearnerCallback, title_level=3)", "_____no_output_____" ], [ "show_doc(RecordOnCPU, title_level=3)", "_____no_output_____" ] ], [ [ "## Undocumented Methods - Methods moved below this line will intentionally be hidden", "_____no_output_____" ] ], [ [ "show_doc(Learner.tta_only)", "_____no_output_____" ], [ "show_doc(Learner.TTA)", "_____no_output_____" ], [ "show_doc(RecordOnCPU.on_batch_begin)", "_____no_output_____" ] ], [ [ "## New Methods - Please document or move to the undocumented section", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ] ]
cbfd641fae1cc693ecd09cee6d64106e4d0c4d8d
5,340
ipynb
Jupyter Notebook
group.ipynb
mmjazzar/-python-snippets
fa53a6bc5c2f60b8a19677ae8dc848fdb6197589
[ "MIT" ]
null
null
null
group.ipynb
mmjazzar/-python-snippets
fa53a6bc5c2f60b8a19677ae8dc848fdb6197589
[ "MIT" ]
null
null
null
group.ipynb
mmjazzar/-python-snippets
fa53a6bc5c2f60b8a19677ae8dc848fdb6197589
[ "MIT" ]
null
null
null
23.946188
75
0.351498
[ [ [ "import numpy as np\nimport pandas as pd\nfrom pandas import Series,DataFrame\nfrom numpy.random import randn", "_____no_output_____" ], [ "dframe = DataFrame({'key1':['x','y','z','g','h'], 'k2':[1,2,1,2,1]\n ,'d1':np.random.randn(5)\n ,'d2':np.random.randn(5)})\ndframe", "_____no_output_____" ], [ "group1 = dframe['d1'].groupby(dframe['k2'])\ngroup1.mean()", "_____no_output_____" ], [ "dframe.groupby('k2').mean()", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code" ] ]
cbfd648d2eaa6cb8f611d21af119c4cd8f180309
13,095
ipynb
Jupyter Notebook
w8/w8d3/NLP_data_preparation.ipynb
bmskarate/lighthouseMain
b2434f14f1378b89085d59f896c44eda5f74eecc
[ "MIT" ]
null
null
null
w8/w8d3/NLP_data_preparation.ipynb
bmskarate/lighthouseMain
b2434f14f1378b89085d59f896c44eda5f74eecc
[ "MIT" ]
null
null
null
w8/w8d3/NLP_data_preparation.ipynb
bmskarate/lighthouseMain
b2434f14f1378b89085d59f896c44eda5f74eecc
[ "MIT" ]
null
null
null
31.707022
924
0.533333
[ [ [ "# load text\nfilename = 'metamorphosis_clean.txt'\nfile = open(filename, 'rt')\ntext = file.read()\nfile.close()\n# open('metamorphosis_clean.txt', rt).read()", "_____no_output_____" ] ], [ [ "### split by whitespace", "_____no_output_____" ] ], [ [ "# load text\nfilename = 'metamorphosis_clean.txt'\nfile = open(filename, 'rt')\ntext = file.read()\nfile.close()\n# split into words by white space\nwords = text.split()\nprint(words[:100])", "_____no_output_____" ] ], [ [ "### convert to list of words and save again", "_____no_output_____" ], [ "Another approach might be to use the regex model (re) and split the document into words by selecting for strings of alphanumeric characters (a-z, A-Z, 0-9 and ‘_’).", "_____no_output_____" ] ], [ [ "# load text\nfilename = 'metamorphosis_clean.txt'\nfile = open(filename, 'rt')\ntext = file.read()\nfile.close()\n# split based on words only\nimport re\nwords = re.split(r'\\W+', text)\nprint(words[:100])", "['The', 'Project', 'Gutenberg', 'EBook', 'of', 'Metamorphosis,', 'by', 'Franz', 'Kafka', 'Translated', 'by', 'David', 'Wyllie.', 'This', 'eBook', 'is', 'for', 'the', 'use', 'of', 'anyone', 'anywhere', 'at', 'no', 'cost', 'and', 'with', 'almost', 'no', 'restrictions', 'whatsoever.', 'You', 'may', 'copy', 'it,', 'give', 'it', 'away', 'or', 're-use', 'it', 'under', 'the', 'terms', 'of', 'the', 'Project', 'Gutenberg', 'License', 'included', 'with', 'this', 'eBook', 'or', 'online', 'at', 'www.gutenberg.org', '**', 'This', 'is', 'a', 'COPYRIGHTED', 'Project', 'Gutenberg', 'eBook,', 'Details', 'Below', '**', '**', 'Please', 'follow', 'the', 'copyright', 'guidelines', 'in', 'this', 'file.', '**', 'Title:', 'Metamorphosis', 'Author:', 'Franz', 'Kafka', 'Translator:', 'David', 'Wyllie', 'Release', 'Date:', 'August', '16,', '2005', '[EBook', '#5200]', 'First', 'posted:', 'May', '13,', '2002', 'Last', 'updated:']\n" ] ], [ [ "### Split by whitespace and remove punctuation", "_____no_output_____" ] ], [ [ "import string\nprint(string.punctuation)", "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~\n" ], [ "# load text\nfilename = 'metamorphosis_clean.txt'\nfile = open(filename, 'rt')\ntext = file.read()\nfile.close()\n# split into words by white space\nwords = text.split()\n# remove punctuation from each word\nimport string\ntable = str.maketrans('', '', string.punctuation)\nstripped = [w.translate(table) for w in words]\nprint(stripped[:100])", "['The', 'Project', 'Gutenberg', 'EBook', 'of', 'Metamorphosis', 'by', 'Franz', 'Kafka', 'Translated', 'by', 'David', 'Wyllie', 'This', 'eBook', 'is', 'for', 'the', 'use', 'of', 'anyone', 'anywhere', 'at', 'no', 'cost', 'and', 'with', 'almost', 'no', 'restrictions', 'whatsoever', 'You', 'may', 'copy', 'it', 'give', 'it', 'away', 'or', 'reuse', 'it', 'under', 'the', 'terms', 'of', 'the', 'Project', 'Gutenberg', 'License', 'included', 'with', 'this', 'eBook', 'or', 'online', 'at', 'wwwgutenbergorg', '', 'This', 'is', 'a', 'COPYRIGHTED', 'Project', 'Gutenberg', 'eBook', 'Details', 'Below', '', '', 'Please', 'follow', 'the', 'copyright', 'guidelines', 'in', 'this', 'file', '', 'Title', 'Metamorphosis', 'Author', 'Franz', 'Kafka', 'Translator', 'David', 'Wyllie', 'Release', 'Date', 'August', '16', '2005', 'EBook', '5200', 'First', 'posted', 'May', '13', '2002', 'Last', 'updated']\n" ] ], [ [ "### normalizing case (like lower, here)", "_____no_output_____" ] ], [ [ "filename = 'metamorphosis_clean.txt'\nfile = open(filename, 'rt')\ntext = file.read()\nfile.close()\n# split into words by white space\nwords = text.split()\n# convert to 
lower case\nwords = [word.lower() for word in words]\nprint(words[:100])", "['the', 'project', 'gutenberg', 'ebook', 'of', 'metamorphosis,', 'by', 'franz', 'kafka', 'translated', 'by', 'david', 'wyllie.', 'this', 'ebook', 'is', 'for', 'the', 'use', 'of', 'anyone', 'anywhere', 'at', 'no', 'cost', 'and', 'with', 'almost', 'no', 'restrictions', 'whatsoever.', 'you', 'may', 'copy', 'it,', 'give', 'it', 'away', 'or', 're-use', 'it', 'under', 'the', 'terms', 'of', 'the', 'project', 'gutenberg', 'license', 'included', 'with', 'this', 'ebook', 'or', 'online', 'at', 'www.gutenberg.org', '**', 'this', 'is', 'a', 'copyrighted', 'project', 'gutenberg', 'ebook,', 'details', 'below', '**', '**', 'please', 'follow', 'the', 'copyright', 'guidelines', 'in', 'this', 'file.', '**', 'title:', 'metamorphosis', 'author:', 'franz', 'kafka', 'translator:', 'david', 'wyllie', 'release', 'date:', 'august', '16,', '2005', '[ebook', '#5200]', 'first', 'posted:', 'may', '13,', '2002', 'last', 'updated:']\n" ] ], [ [ "## NLTK installed", "_____no_output_____" ], [ "### split into sentences", "_____no_output_____" ] ], [ [ "# load data\nfilename = 'metamorphosis_clean.txt'\nfile = open(filename, 'rt')\ntext = file.read()\nfile.close()\n# split into sentences\n# then save each sentence to file, one per line\nfrom nltk import sent_tokenize\nsentences = sent_tokenize(text)\nprint(sentences[0])", "The Project Gutenberg EBook of Metamorphosis, by Franz Kafka\nTranslated by David Wyllie.\n" ] ], [ [ "### split into words", "_____no_output_____" ] ], [ [ "# load data\n# split into words\nfrom nltk.tokenize import word_tokenize\ntokens = word_tokenize(text)\nprint(tokens[:100])", "_____no_output_____" ] ], [ [ "### Filter out punctuation", "_____no_output_____" ] ], [ [ "# load data\n# split into words\nfrom nltk.tokenize import word_tokenize\ntokens = word_tokenize(text)\n# remove all tokens that are not alphabetic\nwords = [word for word in tokens if word.isalpha()]\nprint(words[:100])", "_____no_output_____" ] ], [ [ "### filter out stop words (and pipeline)", "_____no_output_____" ] ], [ [ "# from nltk.corpus import stopwords\n# stop_words = stopwords.words('english')\n# print(stop_words)\n# A stop word is a commonly used word \n# (such as “the”, “a”, “an”, “in”)", "_____no_output_____" ], [ "# load data\nfilename = 'metamorphosis_clean.txt'\nfile = open(filename, 'rt')\ntext = file.read()\nfile.close()\n# split into words\nfrom nltk.tokenize import word_tokenize\ntokens = word_tokenize(text)\n# convert to lower case\ntokens = [w.lower() for w in tokens]\n# remove punctuation from each word\nimport string\ntable = str.maketrans('', '', string.punctuation)\nstripped = [w.translate(table) for w in tokens]\n# remove remaining tokens that are not alphabetic\nwords = [word for word in stripped if word.isalpha()]\n# filter out stop words\nfrom nltk.corpus import stopwords\nstop_words = set(stopwords.words('english'))\nwords = [w for w in words if not w in stop_words]\nprint(words[:100])", "_____no_output_____" ] ], [ [ "### stem words", "_____no_output_____" ], [ "Stemming refers to the process of reducing each word to its root or base.\n\nFor example “fishing,” “fished,” “fisher” all reduce to the stem “fish.”", "_____no_output_____" ] ], [ [ "# load data\nfilename = 'metamorphosis_clean.txt'\nfile = open(filename, 'rt')\ntext = file.read()\nfile.close()\n# split into words\nfrom nltk.tokenize import word_tokenize\ntokens = word_tokenize(text)\n# stemming of words\nfrom nltk.stem.porter import PorterStemmer\nporter = 
PorterStemmer()\nstemmed = [porter.stem(word) for word in tokens]\nprint(stemmed[:100])", "['the', 'project', 'gutenberg', 'ebook', 'of', 'metamorphosi', ',', 'by', 'franz', 'kafka', 'translat', 'by', 'david', 'wylli', '.', 'thi', 'ebook', 'is', 'for', 'the', 'use', 'of', 'anyon', 'anywher', 'at', 'no', 'cost', 'and', 'with', 'almost', 'no', 'restrict', 'whatsoev', '.', 'you', 'may', 'copi', 'it', ',', 'give', 'it', 'away', 'or', 're-us', 'it', 'under', 'the', 'term', 'of', 'the', 'project', 'gutenberg', 'licens', 'includ', 'with', 'thi', 'ebook', 'or', 'onlin', 'at', 'www.gutenberg.org', '*', '*', 'thi', 'is', 'a', 'copyright', 'project', 'gutenberg', 'ebook', ',', 'detail', 'below', '*', '*', '*', '*', 'pleas', 'follow', 'the', 'copyright', 'guidelin', 'in', 'thi', 'file', '.', '*', '*', 'titl', ':', 'metamorphosi', 'author', ':', 'franz', 'kafka', 'translat', ':', 'david', 'wylli', 'releas']\n" ] ], [ [ "Here is a short list of additional considerations when cleaning text:\n\nHandling large documents and large collections of text documents that do not fit into memory.\nExtracting text from markup like HTML, PDF, or other structured document formats.\nTransliteration of characters from other languages into English.\nDecoding Unicode characters into a normalized form, such as UTF8.\nHandling of domain specific words, phrases, and acronyms.\nHandling or removing numbers, such as dates and amounts.\nLocating and correcting common typos and misspellings.\n…", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ] ]
cbfd6ab296de19428c8599d684d7f0ba491783f6
306,970
ipynb
Jupyter Notebook
RQ1.ipynb
francesca15/ADM-HW2
19c304a674363a83dd6e096e1c35630fa44914d5
[ "MIT" ]
null
null
null
RQ1.ipynb
francesca15/ADM-HW2
19c304a674363a83dd6e096e1c35630fa44914d5
[ "MIT" ]
null
null
null
RQ1.ipynb
francesca15/ADM-HW2
19c304a674363a83dd6e096e1c35630fa44914d5
[ "MIT" ]
null
null
null
52.518392
17,440
0.604346
[ [ [ "import datetime as dt", "_____no_output_____" ], [ "import pandas as pd", "_____no_output_____" ], [ "# Get dataframe of boroughs\n\ndf = pd.read_csv(\"taxi_zone_lookup.csv\")\ndf", "_____no_output_____" ], [ "# Create dictionary of boroughs, and build the list of locations for each borough \n# (6 boroughs vs of 265 NY locations)\n\ndfdict = {'EWR': [], 'Queens': [], 'Bronx': [], 'Manhattan': [], 'Staten Island': [], 'Brooklyn': [], 'Unknown': []}\n\nfor i in range(len(df)): \n dfdict[df[\"Borough\"][i]].append(df[\"LocationID\"][i])\n \ndel df", "_____no_output_____" ], [ "# JANUARY", "_____no_output_____" ], [ "# read yellow_tripdata_2018-01.csv (only pickup and dropoff datetime columns)\n# transform pickup and dropoff datetime from string to a datetime columns\ndate_columns = ['tpep_pickup_datetime', 'tpep_dropoff_datetime']\ndf1 = pd.read_csv('yellow_tripdata_2018-01.csv', usecols = ['tpep_pickup_datetime', 'tpep_dropoff_datetime', 'PULocationID'], parse_dates = date_columns)\ndf1", "_____no_output_____" ], [ "type(df1.tpep_pickup_datetime[0]) # verify the transformation for pickup datetime", "_____no_output_____" ], [ "type(df1.tpep_dropoff_datetime[0]) # verify the transformation for dropoff datetime", "_____no_output_____" ], [ "# consider only 2018 for pickup datetime\n\nkeep = df1[df1['tpep_pickup_datetime'].dt.year == 2018]\ndel df1", "_____no_output_____" ], [ "# consider only january for pickup datetime\n\nkeep = keep[keep['tpep_pickup_datetime'].dt.month == 1]", "_____no_output_____" ], [ "# same above for dropoff datetime\n\nkeep = keep[keep['tpep_dropoff_datetime'].dt.year == 2018]\nkeep = keep[keep['tpep_dropoff_datetime'].dt.month == 1]", "_____no_output_____" ], [ "# count daily trips based on pickup datetime\n# count total trips based on location\n\ndate_time_col = keep['tpep_pickup_datetime']\nlocation_col = keep['PULocationID']\n\n# create series \"keep_loc\", of counts by location\n# create series \"keep\", of daily counts\n\nkeep_loc = location_col.groupby(location_col).count()\nkeep = date_time_col.groupby(date_time_col.dt.day).count()\nkeep\n", "_____no_output_____" ], [ "# compute mean \n\nmean1 = keep.mean()\nmean1", "_____no_output_____" ], [ "del keep", "_____no_output_____" ], [ "# count total number of trips for each borough\n\n# this dictionary contains the count for each borough\n\nborough_count = {'EWR': 0, 'Queens': 0, 'Bronx': 0, 'Manhattan': 0, 'Staten Island': 0, 'Brooklyn': 0, 'Unknown': 0}\n\n# for every total count in keep_loc, get the corresponding LocationID, check to what borough it belongs, and \n# add the count for that location to the correct borough\n\nfor i in range(1, len(keep_loc)):\n LocationID = keep_loc.index[i]\n for borough in dfdict:\n if LocationID in dfdict[borough]:\n borough_count[borough] = borough_count[borough] + int(keep_loc[LocationID])\n \nborough_count", "_____no_output_____" ], [ "del keep_loc", "_____no_output_____" ], [ "# compute means from total counts (for every borough)\n\nfor borough in borough_count:\n \n # divide by the number of days in the month\n \n borough_count[borough] = borough_count[borough]/31\n \nborough_count", "_____no_output_____" ], [ "# Convert list of boroughs and list of daily means to DataFrame\n\ndf_borough = pd.DataFrame(list(borough_count.keys()), columns = [\"Borough\"])\ndf_means = pd.DataFrame(list(borough_count.values()), columns = [\"Daily mean\"])\nmerge = df_borough.join(df_means)\nmerge", "_____no_output_____" ], [ "# Plot an histogram\n\nbarplot = merge.plot(x=\"Borough\", 
y=\"Daily mean\", kind='bar', figsize = (15, 6), fontsize = 12)", "_____no_output_____" ], [ "# FEBRUARY", "_____no_output_____" ], [ "date_columns = ['tpep_pickup_datetime', 'tpep_dropoff_datetime']\ndf2 = pd.read_csv('yellow_tripdata_2018-02.csv', usecols = ['tpep_pickup_datetime', 'tpep_dropoff_datetime', 'PULocationID'], parse_dates = date_columns)\ndf2", "_____no_output_____" ], [ "keep2 = df2[df2['tpep_pickup_datetime'].dt.year == 2018]\ndel df2", "_____no_output_____" ], [ "keep2 = keep2[keep2['tpep_pickup_datetime'].dt.month == 2]", "_____no_output_____" ], [ "keep2 = keep2[keep2['tpep_dropoff_datetime'].dt.year == 2018]\nkeep2 = keep2[keep2['tpep_dropoff_datetime'].dt.month == 2]", "_____no_output_____" ], [ "date_time_col = keep2['tpep_pickup_datetime']\nlocation_col = keep2['PULocationID']\n\nkeep_loc = location_col.groupby(location_col).count()\nkeep2 = date_time_col.groupby(date_time_col.dt.day).count()\nkeep2", "_____no_output_____" ], [ "mean2 = keep2.mean()\nmean2", "_____no_output_____" ], [ "del keep2", "_____no_output_____" ], [ "borough_count = {'EWR': 0, 'Queens': 0, 'Bronx': 0, 'Manhattan': 0, 'Staten Island': 0, 'Brooklyn': 0, 'Unknown': 0}\n\nfor i in range(1, len(keep_loc)):\n LocationID = keep_loc.index[i]\n for borough in dfdict:\n if LocationID in dfdict[borough]:\n borough_count[borough] = borough_count[borough] + int(keep_loc[LocationID])\n \nborough_count", "_____no_output_____" ], [ "del keep_loc", "_____no_output_____" ], [ "for borough in borough_count: \n borough_count[borough] = borough_count[borough]/28\n \nborough_count", "_____no_output_____" ], [ "df_borough = pd.DataFrame(list(borough_count.keys()), columns = [\"Borough\"])\ndf_means = pd.DataFrame(list(borough_count.values()), columns = [\"Daily mean\"])\nmerge = df_borough.join(df_means)\nmerge", "_____no_output_____" ], [ "# Plot an histogram\n\nbarplot = merge.plot(x=\"Borough\", y=\"Daily mean\", kind='bar', figsize = (15, 6), fontsize = 12)", "_____no_output_____" ], [ "# MARCH", "_____no_output_____" ], [ "date_columns = ['tpep_pickup_datetime', 'tpep_dropoff_datetime']\ndf3 = pd.read_csv('yellow_tripdata_2018-03.csv', usecols = ['tpep_pickup_datetime', 'tpep_dropoff_datetime', 'PULocationID'], parse_dates = date_columns)\ndf3", "_____no_output_____" ], [ "keep3 = df3[df3['tpep_pickup_datetime'].dt.year == 2018]\ndel df3", "_____no_output_____" ], [ "keep3 = keep3[keep3['tpep_pickup_datetime'].dt.month == 3]", "_____no_output_____" ], [ "keep3 = keep3[keep3['tpep_dropoff_datetime'].dt.year == 2018]\nkeep3 = keep3[keep3['tpep_dropoff_datetime'].dt.month == 3]", "_____no_output_____" ], [ "date_time_col = keep3['tpep_pickup_datetime']\nlocation_col = keep3['PULocationID']\n\nkeep_loc = location_col.groupby(location_col).count()\nkeep3 = date_time_col.groupby(date_time_col.dt.day).count()\nkeep3", "_____no_output_____" ], [ "mean3 = keep3.mean()\nmean3", "_____no_output_____" ], [ "del keep3", "_____no_output_____" ], [ "borough_count = {'EWR': 0, 'Queens': 0, 'Bronx': 0, 'Manhattan': 0, 'Staten Island': 0, 'Brooklyn': 0, 'Unknown': 0}\n\nfor i in range(1, len(keep_loc)):\n LocationID = keep_loc.index[i]\n for borough in dfdict:\n if LocationID in dfdict[borough]:\n borough_count[borough] = borough_count[borough] + int(keep_loc[LocationID])\n \nborough_count", "_____no_output_____" ], [ "del keep_loc", "_____no_output_____" ], [ "for borough in borough_count: \n borough_count[borough] = borough_count[borough]/31\n \nborough_count", "_____no_output_____" ], [ "df_borough = 
pd.DataFrame(list(borough_count.keys()), columns = [\"Borough\"])\ndf_means = pd.DataFrame(list(borough_count.values()), columns = [\"Daily mean\"])\nmerge = df_borough.join(df_means)\nmerge", "_____no_output_____" ], [ "# Plot an histogram\n\nbarplot = merge.plot(x=\"Borough\", y=\"Daily mean\", kind='bar', figsize = (15, 6), fontsize = 12)", "_____no_output_____" ], [ "# APRIL", "_____no_output_____" ], [ "date_columns = ['tpep_pickup_datetime', 'tpep_dropoff_datetime']\ndf4 = pd.read_csv('yellow_tripdata_2018-04.csv', usecols = ['tpep_pickup_datetime', 'tpep_dropoff_datetime', 'PULocationID'], parse_dates = date_columns)\ndf4", "_____no_output_____" ], [ "keep4 = df4[df4['tpep_pickup_datetime'].dt.year == 2018]\ndel df4", "_____no_output_____" ], [ "keep4 = keep4[keep4['tpep_pickup_datetime'].dt.month == 4]", "_____no_output_____" ], [ "keep4 = keep4[keep4['tpep_dropoff_datetime'].dt.year == 2018]\nkeep4 = keep4[keep4['tpep_dropoff_datetime'].dt.month == 4]", "_____no_output_____" ], [ "date_time_col = keep4['tpep_pickup_datetime']\nlocation_col = keep4['PULocationID']\n\nkeep_loc = location_col.groupby(location_col).count()\nkeep4 = date_time_col.groupby(date_time_col.dt.day).count()\nkeep4", "_____no_output_____" ], [ "mean4 = keep4.mean()\nmean4", "_____no_output_____" ], [ "del keep4", "_____no_output_____" ], [ "borough_count = {'EWR': 0, 'Queens': 0, 'Bronx': 0, 'Manhattan': 0, 'Staten Island': 0, 'Brooklyn': 0, 'Unknown': 0}\n\nfor i in range(1, len(keep_loc)):\n LocationID = keep_loc.index[i]\n for borough in dfdict:\n if LocationID in dfdict[borough]:\n borough_count[borough] = borough_count[borough] + int(keep_loc[LocationID])\n \nborough_count", "_____no_output_____" ], [ "del keep_loc", "_____no_output_____" ], [ "for borough in borough_count: \n borough_count[borough] = borough_count[borough]/30\n \nborough_count", "_____no_output_____" ], [ "df_borough = pd.DataFrame(list(borough_count.keys()), columns = [\"Borough\"])\ndf_means = pd.DataFrame(list(borough_count.values()), columns = [\"Daily mean\"])\nmerge = df_borough.join(df_means)\nmerge", "_____no_output_____" ], [ "# Plot an histogram\n\nbarplot = merge.plot(x=\"Borough\", y=\"Daily mean\", kind='bar', figsize = (15, 6), fontsize = 12)", "_____no_output_____" ], [ "# MAY", "_____no_output_____" ], [ "date_columns = ['tpep_pickup_datetime', 'tpep_dropoff_datetime']\ndf5 = pd.read_csv('yellow_tripdata_2018-05.csv', usecols = ['tpep_pickup_datetime', 'tpep_dropoff_datetime', 'PULocationID'], parse_dates = date_columns)\ndf5", "_____no_output_____" ], [ "keep5 = df5[df5['tpep_pickup_datetime'].dt.year == 2018]\ndel df5", "_____no_output_____" ], [ "keep5 = keep5[keep5['tpep_pickup_datetime'].dt.month == 5]", "_____no_output_____" ], [ "keep5 = keep5[keep5['tpep_dropoff_datetime'].dt.year == 2018]\nkeep5 = keep5[keep5['tpep_dropoff_datetime'].dt.month == 5]", "_____no_output_____" ], [ "date_time_col = keep5['tpep_pickup_datetime']\nlocation_col = keep5['PULocationID']\n\nkeep_loc = location_col.groupby(location_col).count()\nkeep5 = date_time_col.groupby(date_time_col.dt.day).count()\nkeep5", "_____no_output_____" ], [ "mean5 = keep5.mean()\nmean5", "_____no_output_____" ], [ "del keep5", "_____no_output_____" ], [ "borough_count = {'EWR': 0, 'Queens': 0, 'Bronx': 0, 'Manhattan': 0, 'Staten Island': 0, 'Brooklyn': 0, 'Unknown': 0}\n\nfor i in range(1, len(keep_loc)):\n LocationID = keep_loc.index[i]\n for borough in dfdict:\n if LocationID in dfdict[borough]:\n borough_count[borough] = borough_count[borough] + 
int(keep_loc[LocationID])\n \nborough_count", "_____no_output_____" ], [ "del keep_loc", "_____no_output_____" ], [ "for borough in borough_count:\n borough_count[borough] = borough_count[borough]/31\n \nborough_count", "_____no_output_____" ], [ "df_borough = pd.DataFrame(list(borough_count.keys()), columns = [\"Borough\"])\ndf_means = pd.DataFrame(list(borough_count.values()), columns = [\"Daily mean\"])\nmerge = df_borough.join(df_means)\nmerge", "_____no_output_____" ], [ "# Plot an histogram\n\nbarplot = merge.plot(x=\"Borough\", y=\"Daily mean\", kind='bar', figsize = (15, 6), fontsize = 12)", "_____no_output_____" ], [ "# JUNE", "_____no_output_____" ], [ "date_columns = ['tpep_pickup_datetime', 'tpep_dropoff_datetime']\ndf6 = pd.read_csv('yellow_tripdata_2018-06.csv', usecols = ['tpep_pickup_datetime', 'tpep_dropoff_datetime', 'PULocationID'], parse_dates = date_columns)\ndf6", "_____no_output_____" ], [ "keep6 = df6[df6['tpep_pickup_datetime'].dt.year == 2018]\ndel df6", "_____no_output_____" ], [ "keep6 = keep6[keep6['tpep_pickup_datetime'].dt.month == 6]", "_____no_output_____" ], [ "keep6 = keep6[keep6['tpep_dropoff_datetime'].dt.year == 2018]\nkeep6 = keep6[keep6['tpep_dropoff_datetime'].dt.month == 6]", "_____no_output_____" ], [ "date_time_col = keep6['tpep_pickup_datetime']\nlocation_col = keep6['PULocationID']\n\nkeep_loc = location_col.groupby(location_col).count()\nkeep6 = date_time_col.groupby(date_time_col.dt.day).count()\nkeep6", "_____no_output_____" ], [ "mean6 = keep6.mean()\nmean6", "_____no_output_____" ], [ "del keep6", "_____no_output_____" ], [ "borough_count = {'EWR': 0, 'Queens': 0, 'Bronx': 0, 'Manhattan': 0, 'Staten Island': 0, 'Brooklyn': 0, 'Unknown': 0}\n\nfor i in range(1, len(keep_loc)):\n LocationID = keep_loc.index[i]\n for borough in dfdict:\n if LocationID in dfdict[borough]:\n borough_count[borough] = borough_count[borough] + int(keep_loc[LocationID])\n \nborough_count", "_____no_output_____" ], [ "del keep_loc", "_____no_output_____" ], [ "for borough in borough_count:\n borough_count[borough] = borough_count[borough]/30\n \nborough_count", "_____no_output_____" ], [ "df_borough = pd.DataFrame(list(borough_count.keys()), columns = [\"Borough\"])\ndf_means = pd.DataFrame(list(borough_count.values()), columns = [\"Daily mean\"])\nmerge = df_borough.join(df_means)\nmerge", "_____no_output_____" ], [ "# Plot an histogram\n\nbarplot = merge.plot(x=\"Borough\", y=\"Daily mean\", kind='bar', figsize = (15, 6), fontsize = 12)", "_____no_output_____" ], [ "# OVERALL NY ANALYSIS", "_____no_output_____" ], [ "# build list of means\n\nl1 = [mean1, mean2, mean3, mean4, mean5, mean6]\nl1", "_____no_output_____" ], [ "df_mean = pd.DataFrame(l1, columns = ['daily average'])", "_____no_output_____" ], [ "df_mean", "_____no_output_____" ], [ "l2 = ['january', 'february', 'march', 'april', 'may', 'june']", "_____no_output_____" ], [ "df_month = pd.DataFrame(l2, columns = ['month'])", "_____no_output_____" ], [ "df_month", "_____no_output_____" ], [ "merge = df_month.join(df_mean)\nmerge", "_____no_output_____" ], [ "barplot = merge.plot(x = 'month', y = 'daily average', kind = 'bar', figsize = (10, 6), fontsize = 12)", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbfd71fa6b64c092180ede40de7ee4ee8b8a7c39
78,613
ipynb
Jupyter Notebook
examples/natal.ipynb
adaj/geohunter
fbbb7492f79fa838c2080c90d5e8ac2066d29568
[ "MIT" ]
7
2019-04-30T14:35:04.000Z
2021-11-30T21:05:01.000Z
examples/natal.ipynb
adaj/geohunter
fbbb7492f79fa838c2080c90d5e8ac2066d29568
[ "MIT" ]
3
2019-05-07T19:30:21.000Z
2021-12-01T13:44:56.000Z
examples/natal.ipynb
adaj/geohunter
fbbb7492f79fa838c2080c90d5e8ac2066d29568
[ "MIT" ]
null
null
null
304.70155
36,280
0.911732
[ [ [ "import geohunter as gh\n\nwith gh.osm.Eagle() as api:\n \n state = api.get('(-8.02, -41.01, -3.0, -33.0)',\n largest_geom=True,\n name='Rio Grande do Norte')\n \n city = api.get('(-8.02, -41.01, -3.0, -33.0)',\n largest_geom=True,\n name='Natal')\n \n poi = api.get(city,\n amenity=['school', 'hospital'],\n highway='primary',\n natural='*')\n \n all_cities = api.get(state, sjoin_op='within',\n admin_level='8')\n \n biggest_city = api.get(state, sjoin_op='within', largest_geom=True,\n admin_level='8')", "Geohunter: [TIMELOG] get -- {'largest_geom': True, 'name': 'Rio Grande do Norte'} -- Completed in 2.8099s\nGeohunter: [TIMELOG] get -- {'largest_geom': True, 'name': 'Natal'} -- Completed in 1.2079s\nGeohunter: [TIMELOG] get -- {'amenity': ['school', 'hospital'], 'highway': 'primary', 'natural': '*'} -- Completed in 5.8873s\nGeohunter: [TIMELOG] get -- {'sjoin_op': 'within', 'admin_level': '8'} -- Completed in 3.1931s\nGeohunter: [TIMELOG] get -- {'sjoin_op': 'within', 'largest_geom': True, 'admin_level': '8'} -- Completed in 2.7193s\n" ], [ "ax = state.plot()\nall_cities.plot(ax=ax, color='orange')\nbiggest_city.plot(ax=ax, color='red')\ncity.plot(ax=ax, color='yellow')", "_____no_output_____" ], [ "poi.head()", "_____no_output_____" ], [ "ax = city.plot()\npoi.plot(ax=ax, column='item')", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code" ] ]
cbfd7f18c62263813e7aed06aa3273d87f193621
18,613
ipynb
Jupyter Notebook
convokit/forecaster/tests/cumulativeBoW_demo.ipynb
sophieball/Cornell-Conversational-Analysis-Toolkit
da65358baffc34a518114be2d94f1748f8e01240
[ "MIT" ]
371
2016-07-19T22:10:13.000Z
2022-03-28T08:04:32.000Z
convokit/forecaster/tests/cumulativeBoW_demo.ipynb
sophieball/Cornell-Conversational-Analysis-Toolkit
da65358baffc34a518114be2d94f1748f8e01240
[ "MIT" ]
92
2017-07-25T22:04:11.000Z
2022-03-29T13:46:07.000Z
convokit/forecaster/tests/cumulativeBoW_demo.ipynb
sophieball/Cornell-Conversational-Analysis-Toolkit
da65358baffc34a518114be2d94f1748f8e01240
[ "MIT" ]
105
2016-07-04T15:04:53.000Z
2022-03-30T01:36:38.000Z
36.496078
965
0.596787
[ [ [ "from convokit import Corpus, download", "_____no_output_____" ], [ "corpus = Corpus(filename=download('subreddit-Cornell'))", "Dataset already exists at /Users/calebchiam/.convokit/downloads/subreddit-Cornell\n" ], [ "corpus.print_summary_stats()", "Number of Users: 7568\nNumber of Utterances: 74467\nNumber of Conversations: 10744\n" ] ], [ [ "## Some new Conversation functionality", "_____no_output_____" ] ], [ [ "convo = corpus.get_conversation('o31u0')", "_____no_output_____" ], [ "convo.print_conversation_structure()", "cchambo\n jklol\n djnap\n Brimwoodboy\n jklol\n" ], [ "convo.print_conversation_structure(lambda utt: utt.id)", "o31u0\n c3dzmtu\n c3e0ou0\n c3f7l5b\n c3feqc4\n" ], [ "convo.get_chronological_utterance_list()", "_____no_output_____" ], [ "[utt.user.id for utt in convo.get_chronological_utterance_list()]", "_____no_output_____" ], [ "convo.get_root_to_leaf_paths()", "_____no_output_____" ], [ "for path in convo.get_root_to_leaf_paths():\n print([utt.user.id for utt in path])", "['cchambo', 'jklol']\n['cchambo', 'djnap']\n['cchambo', 'Brimwoodboy', 'jklol']\n" ] ], [ [ "## Cumulative BoW", "_____no_output_____" ] ], [ [ "from convokit import Forecaster", "_____no_output_____" ] ], [ [ "Let's set up a forecasting task to predict for whether a Reddit comment will have a positive score, i.e. upvotes > downvotes.", "_____no_output_____" ] ], [ [ "# Adding a 'y' feature to fit to\nfor utt in corpus.iter_utterances():\n utt.add_meta('pos_score', int(utt.meta['score'] > 0))", "_____no_output_____" ], [ "forecaster = Forecaster(label_func=lambda utt: utt.meta['pos_score'], skip_broken_convos=True)", "No model passed to Forecaster. Initializing default forecaster model: Cumulative Bag-of-words...\nInitializing default unigram CountVectorizer...\nInitializing default classification model (standard scaled logistic regression)\n" ], [ "forecaster.fit(corpus)", "Fitting cumulative BoW classification model...\nDone.\n" ], [ "forecaster.transform(corpus)", "_____no_output_____" ], [ "forecast_df = forecaster.summarize(corpus)", "_____no_output_____" ], [ "forecast_df.shape", "_____no_output_____" ], [ "forecast_df.head()", "_____no_output_____" ], [ "forecast_df.tail(10)", "_____no_output_____" ] ], [ [ "Let's examine a Conversation that has an utterance forecasted to have negative score.", "_____no_output_____" ] ], [ [ "corpus.get_utterance('dpn8e4v')", "_____no_output_____" ], [ "corpus.get_utterance('dpn8e4v').root", "_____no_output_____" ], [ "corpus.get_conversation(corpus.get_utterance('dpn8e4v').root).print_conversation_structure()", "_____no_output_____" ] ], [ [ "### Forecasted", "_____no_output_____" ] ], [ [ "corpus.get_conversation(corpus.get_utterance('dpn8e4v').root).print_conversation_structure(lambda utt: str(utt.meta['forecast']))", "_____no_output_____" ] ], [ [ "### Actual", "_____no_output_____" ] ], [ [ "corpus.get_conversation(corpus.get_utterance('dpn8e4v').root).print_conversation_structure(lambda utt: str(utt.meta['pos_score']))", "_____no_output_____" ], [ "forecasts = [utt.meta['forecast'] for utt in corpus.iter_utterances()]\nactual = [utt.meta['pos_score'] for utt in corpus.iter_utterances()]", "_____no_output_____" ], [ "y_true_pred = [(forecast, actual) for forecast, actual in zip(forecasts, actual) if forecast is not None]", "_____no_output_____" ], [ "import numpy as np\nfrom collections import Counter", "_____no_output_____" ], [ "y_pred = np.array([x[0] for x in y_true_pred])", "_____no_output_____" ], [ "y_true = np.array([x[1] for x in 
y_true_pred])", "_____no_output_____" ], [ "# baseline accuracy: assume all positive\nnp.mean(y_true)", "_____no_output_____" ], [ "# achieved accuracy\nnp.mean(y_true == y_pred)", "_____no_output_____" ], [ "from sklearn.metrics import confusion_matrix", "_____no_output_____" ], [ "confusion_matrix(y_true=y_true, y_pred=y_pred)", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbfd81331bce2d12dbe13187f410f3e1cbb159af
303,450
ipynb
Jupyter Notebook
todoist/todoist_data_analysis.ipynb
Zackhardtoname/qs_ledger
77d15079e90be40429b99be8abaa5a51423585d8
[ "MIT" ]
755
2018-06-17T08:28:38.000Z
2022-03-27T05:37:02.000Z
todoist/todoist_data_analysis.ipynb
Zackhardtoname/qs_ledger
77d15079e90be40429b99be8abaa5a51423585d8
[ "MIT" ]
17
2019-03-31T08:26:09.000Z
2022-03-31T05:33:22.000Z
todoist/todoist_data_analysis.ipynb
Zackhardtoname/qs_ledger
77d15079e90be40429b99be8abaa5a51423585d8
[ "MIT" ]
195
2018-08-30T11:41:28.000Z
2022-03-31T11:35:20.000Z
307.446809
43,970
0.91505
[ [ [ "# Todoist Data Analysis", "_____no_output_____" ], [ "This notebook processed the downloaded history of your todoist tasks. See [todoist_downloader.ipynb](https://github.com/markwk/qs_ledger/blob/master/todoist/todoist_downloader.ipynb) to export and download your task history from Todoist. ", "_____no_output_____" ], [ "---", "_____no_output_____" ] ], [ [ "from datetime import date, datetime as dt, timedelta as td\nimport numpy as np\nimport pandas as pd\n\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nimport matplotlib.dates as mdates\n%matplotlib inline", "_____no_output_____" ], [ "# supress warnings\nimport warnings\nwarnings.filterwarnings('ignore')", "_____no_output_____" ] ], [ [ "---", "_____no_output_____" ], [ "# General Data Analysis of Todoist Tasks", "_____no_output_____" ] ], [ [ "# import raw data\nraw_tasks = pd.read_csv(\"data/todost-raw-tasks-completed.csv\")\nlen(raw_tasks)", "_____no_output_____" ], [ "# import processed data\ntasks = pd.read_csv(\"data/todost-tasks-completed.csv\")\nlen(tasks)", "_____no_output_____" ] ], [ [ "----", "_____no_output_____" ], [ "### Simple Data Analysis: Completed Tasks Per Year", "_____no_output_____" ] ], [ [ "year_data = tasks['year'].value_counts().sort_index()", "_____no_output_____" ], [ "# Chart Monthly Tasks Count \ndataset = year_data\nchart_title = 'Number of Tasks Completed Per Year'\n\nplt.style.use('seaborn-darkgrid')\nax = dataset.plot.bar(figsize=(14, 5), rot=0, legend=False)\nax.set_ylabel('Tasks Completed')\nax.set_xlabel('')\nax.set_title(chart_title)\nplt.show()", "_____no_output_____" ] ], [ [ "### Simple Data Analysis: Completed Tasks Per Month", "_____no_output_____" ] ], [ [ "# simple breakdown by month\ntotals_by_month = tasks['month'].value_counts().sort_index()", "_____no_output_____" ], [ "# Chart Monthly Tasks Count \ndataset = totals_by_month.tail(24)\nchart_title = 'Monthly Number of Tasks Completed (Last 24 Months)'\n\nplt.style.use('seaborn-darkgrid')\nax = dataset.plot.bar(figsize=(14, 5), rot=90, colormap='spring', stacked=True, legend=False)\nax.set_ylabel('Tasks Completed')\nax.set_xlabel('')\nax.set_title(chart_title)\nplt.show()", "_____no_output_____" ] ], [ [ "------", "_____no_output_____" ], [ "### Simple Data Analysis: Completed Tasks by Day of Week", "_____no_output_____" ] ], [ [ "totals_dow = tasks['dow'].value_counts().sort_index()", "_____no_output_____" ], [ "dataset = totals_dow\nchart_title = 'Completed Tasks by Day of Week'\n\nplt.style.use('seaborn-darkgrid')\nax = dataset.plot.bar(figsize=(14, 5), rot=0, colormap='autumn', stacked=True, legend=False)\nax.set_ylabel('# Completed')\nax.set_xlabel('')\nax.set_title(chart_title)\nplt.show()", "_____no_output_____" ] ], [ [ "-----", "_____no_output_____" ], [ "### Simple Data Analysis: Completed Tasks by Hour of the Day", "_____no_output_____" ] ], [ [ "hour_counts = tasks['hour'].value_counts().sort_index()\n\nax = hour_counts.plot(kind='line', figsize=[10, 4], linewidth=4, alpha=1, marker='o', color='#6684c1', \n markeredgecolor='#6684c1', markerfacecolor='w', markersize=8, markeredgewidth=2)\n\nxlabels = hour_counts.index.map(lambda x: '{:02}:00'.format(x))\nax.set_xticks(range(len(xlabels)))\nax.set_xticklabels(xlabels, rotation=45, rotation_mode='anchor', ha='right')\n\nax.set_xlim((hour_counts.index[0], hour_counts.index[-1]))\n\nax.yaxis.grid(True)\nhour_max = hour_counts.max()\nax.set_ylim((0, hour_max+20))\nax.set_ylabel('Number of Tasks')\nax.set_xlabel('', )\nax.set_title('Number of Tasks Completed per 
hour of the day', )\n\nplt.show()", "_____no_output_____" ] ], [ [ "----", "_____no_output_____" ], [ "## Daily Count of Tasks Completed", "_____no_output_____" ] ], [ [ "daily_counts = tasks['date'].value_counts().sort_index()", "_____no_output_____" ], [ "dataset = daily_counts.tail(30)\n\nchart_title = 'Number of Tasks Completed per Day'\n\nn_groups = len(dataset)\nindex = np.arange(n_groups)\n\nax = dataset.plot(kind='line', figsize=[12, 5], linewidth=4, alpha=1, marker='o', color='#6684c1', \n markeredgecolor='#6684c1', markerfacecolor='w', markersize=8, markeredgewidth=2)\n\nax.yaxis.grid(True)\nax.xaxis.grid(True)\n\nax.set_xticks(index)\nax.set_ylabel('Tasks Completed Count')\n# ax.set_xlabel('')\nplt.xticks(index, dataset.index, rotation=90)\nax.set_title(chart_title)\n\nplt.show()", "_____no_output_____" ], [ "# Export\ndaily_counts.to_csv(\"data/todoist-daily-completed.csv\", index=True)", "_____no_output_____" ] ], [ [ "-----", "_____no_output_____" ], [ "### Projects Breakdown", "_____no_output_____" ] ], [ [ "# Optionally pass a list of projects to exclude\nexclude_proj = ['Project1', 'Project2']\ntasks_data = tasks[~tasks.project_name.isin(exclude_proj)]\nproject_counts = tasks_data['project_name'].value_counts().sort_values(ascending=False)", "_____no_output_____" ], [ "# Chart Project Tasks \ndataset = project_counts.sort_values(ascending=True).tail(15)\nchart_title = 'Project Tasks Breakdown'\n\nplt.style.use('seaborn-darkgrid')\nax = dataset.plot.barh(y='Hours', figsize=(8, 8), colormap='plasma', legend=False)\nax.set_ylabel('')\nax.set_xlabel('Task #')\n\nax.set_title(chart_title)\nplt.show()", "_____no_output_____" ] ], [ [ "-----", "_____no_output_____" ], [ "## General Summary of Todoist Tasks", "_____no_output_____" ] ], [ [ "# Life-time Project Time Summary\nprint('====== Todoist Lifetime Summary ====== ')\nprint('Total Tasks Completed: {:,}'.format(len(tasks)))\ndaily_average = round(daily_counts.mean(),1)\nprint('Daily Task Average: {:,}'.format(daily_average))\nprint(' ')\nprint('Top 5 Days with Most Tasks Completed:')\nfor i, v in daily_counts.sort_values(ascending=False).head(5).items():\n print(v, 'tasks on ', i)", "====== Todoist Lifetime Summary ====== \nTotal Tasks Completed: 5,114\nDaily Task Average: 6.3\n \nTop 5 Days with Most Tasks Completed:\n22 tasks on 2016-08-29\n20 tasks on 2017-04-24\n18 tasks on 2016-10-18\n17 tasks on 2016-09-14\n15 tasks on 2017-11-08\n" ] ], [ [ "------", "_____no_output_____" ], [ "# Year in Review", "_____no_output_____" ] ], [ [ "# Set Year\ntarget_year = 2018", "_____no_output_____" ] ], [ [ "### Year: Top Projects", "_____no_output_____" ] ], [ [ "def yearly_top_projects_chart(year, exclude_projects=[]): \n year_data = tasks[tasks['year'] == year]\n # Optionally pass a list of projects to exclude\n if exclude_projects:\n exclude_proj = exclude_projects\n year_data = year_data[~tasks.project_name.isin(exclude_proj)]\n project_counts = year_data['project_name'].value_counts().sort_values(ascending=False)\n project_counts = year_data['project_name'].value_counts().sort_values(ascending=False)\n \n # Chart Project Tasks \n dataset = project_counts.sort_values(ascending=True).tail(10)\n chart_title = '{} Project Tasks Breakdown'.format(year)\n\n plt.style.use('seaborn-darkgrid')\n ax = dataset.plot.barh(y='Hours', figsize=(8, 8), colormap='plasma', legend=False)\n ax.set_ylabel('')\n ax.set_xlabel('Task #')\n\n ax.set_title(chart_title)\n plt.show()", "_____no_output_____" ], [ "# yearly_top_projects_chart(year=target_year, 
exclude_projects=['ProjectName', 'ProjectName2''])\nyearly_top_projects_chart(year=target_year)", "_____no_output_____" ] ], [ [ "### Year: Day of Week Comparison", "_____no_output_____" ] ], [ [ "def yearly_dow_chart(year): \n year_data = tasks[tasks['year'] == year]\n yearly_dow = year_data['dow'].value_counts().sort_index()\n days_of_week_list = ['Mon', 'Tues', 'Wed', 'Thurs', 'Friday', 'Sat', 'Sun']\n yearly_dow.index = days_of_week_list\n \n chart_title = '{} Tasks Completed by Day of Week | Yearly Total: {:,}'.format(year, yearly_dow.sum())\n plt.style.use('seaborn-darkgrid')\n ax = yearly_dow.plot.bar(stacked=True, rot=0, figsize=(12,4))\n ax.set_xlabel('')\n ax.set_ylabel('Hours')\n ax.set_title(chart_title)\n plt.show()", "_____no_output_____" ], [ "yearly_dow_chart(year=target_year)", "_____no_output_____" ] ], [ [ "### Year: Monthly Tasks Completed Chart", "_____no_output_____" ] ], [ [ "def yearly_months_chart(year): \n year_data = tasks[tasks['year'] == year]\n yearly_months = year_data['month'].value_counts().sort_index()\n months_of_year = ['Jan', 'Feb', 'March', 'April', 'May', 'June', 'July', \n 'Aug', 'Sept', 'Oct', 'Nov', 'Dec']\n yearly_months.index = months_of_year\n \n # Chart Monthly Tasks Count \n dataset = yearly_months\n chart_title = 'Monthly Number of Tasks Completed'\n\n plt.style.use('seaborn-darkgrid')\n ax = dataset.plot.bar(figsize=(14, 5), rot=0, colormap='spring', stacked=True, legend=False)\n ax.set_ylabel('Tasks Completed')\n ax.set_xlabel('')\n ax.set_title(chart_title)\n plt.show()", "_____no_output_____" ], [ "yearly_months_chart(year=target_year)", "_____no_output_____" ] ], [ [ "#### Year: Tasks Heat Map", "_____no_output_____" ] ], [ [ "# Helper Function to Create Heat Map from Data\n# Adapted from https://stackoverflow.com/questions/32485907/matplotlib-and-numpy-create-a-calendar-heatmap\nDAYS = ['Sun.', 'Mon.', 'Tues.', 'Wed.', 'Thurs.', 'Fri.', 'Sat.']\nMONTHS = ['Jan.', 'Feb.', 'Mar.', 'Apr.', 'May', 'June', 'July', 'Aug.', 'Sept.', 'Oct.', 'Nov.', 'Dec.']\n\n\ndef date_heatmap(series, start=None, end=None, mean=False, ax=None, **kwargs):\n '''Plot a calendar heatmap given a datetime series.\n\n Arguments:\n series (pd.Series):\n A series of numeric values with a datetime index. Values occurring\n on the same day are combined by sum.\n start (Any):\n The first day to be considered in the plot. The value can be\n anything accepted by :func:`pandas.to_datetime`. The default is the\n earliest date in the data.\n end (Any):\n The last day to be considered in the plot. The value can be\n anything accepted by :func:`pandas.to_datetime`. The default is the\n latest date in the data.\n mean (bool):\n Combine values occurring on the same day by mean instead of sum.\n ax (matplotlib.Axes or None):\n The axes on which to draw the heatmap. The default is the current\n axes in the :module:`~matplotlib.pyplot` API.\n **kwargs:\n Forwarded to :meth:`~matplotlib.Axes.pcolormesh` for drawing the\n heatmap.\n\n Returns:\n matplotlib.collections.Axes:\n The axes on which the heatmap was drawn. 
This is set as the current\n axes in the `~matplotlib.pyplot` API.\n '''\n # Combine values occurring on the same day.\n dates = series.index.floor('D')\n group = series.groupby(dates)\n series = group.mean() if mean else group.sum()\n\n # Parse start/end, defaulting to the min/max of the index.\n start = pd.to_datetime(start or series.index.min())\n end = pd.to_datetime(end or series.index.max())\n\n # We use [start, end) as a half-open interval below.\n end += np.timedelta64(1, 'D')\n\n # Get the previous/following Sunday to start/end.\n # Pandas and numpy day-of-week conventions are Monday=0 and Sunday=6.\n start_sun = start - np.timedelta64((start.dayofweek + 1) % 7, 'D')\n end_sun = end + np.timedelta64(7 - end.dayofweek - 1, 'D')\n\n # Create the heatmap and track ticks.\n num_weeks = (end_sun - start_sun).days // 7\n heatmap = np.zeros((7, num_weeks))\n ticks = {} # week number -> month name\n for week in range(num_weeks):\n for day in range(7):\n date = start_sun + np.timedelta64(7 * week + day, 'D')\n if date.day == 1:\n ticks[week] = MONTHS[date.month - 1]\n if date.dayofyear == 1:\n ticks[week] += f'\\n{date.year}'\n if start <= date < end:\n heatmap[day, week] = series.get(date, 0)\n\n # Get the coordinates, offset by 0.5 to align the ticks.\n y = np.arange(8) - 0.5\n x = np.arange(num_weeks + 1) - 0.5\n\n # Plot the heatmap. Prefer pcolormesh over imshow so that the figure can be\n # vectorized when saved to a compatible format. We must invert the axis for\n # pcolormesh, but not for imshow, so that it reads top-bottom, left-right.\n ax = ax or plt.gca()\n mesh = ax.pcolormesh(x, y, heatmap, **kwargs)\n ax.invert_yaxis()\n\n # Set the ticks.\n ax.set_xticks(list(ticks.keys()))\n ax.set_xticklabels(list(ticks.values()))\n ax.set_yticks(np.arange(7))\n ax.set_yticklabels(DAYS)\n\n # Set the current image and axes in the pyplot API.\n plt.sca(ax)\n plt.sci(mesh)\n\n return ax", "_____no_output_____" ], [ "def year_heat_chart(year): \n\n # Filter by Year\n year_data = tasks[(tasks['year'] == year)]\n \n # daily count\n year_dates_data = year_data['date'].value_counts().reset_index()\n year_dates_data.columns = ['date', 'count']\n year_dates_data['date'] = pd.to_datetime(year_dates_data['date'])\n\n # Generate all dates in that year\n first_date = str(year)+'-01-01'\n last_date = str(year)+'-12-31'\n all_dates = pd.date_range(start=first_date, end=last_date)\n all_dates = pd.DataFrame(all_dates, columns=['date'])\n \n # combine actual runs by date with total dates possible\n year_data = pd.merge(left=all_dates, right=year_dates_data, \n left_on=\"date\", right_on=\"date\", how=\"outer\")\n year_data['count'].fillna(0, inplace=True)\n year_data = year_data.set_index(pd.DatetimeIndex(year_data['date']))\n \n max_daily_count = round(year_data['count'].max(),2)\n \n # key stat and title\n total_tasks = round(year_data['count'].sum())\n chart_title = '{} Todoist Tasks Heatmap | Total Tasks: {:,}'.format(year, total_tasks)\n \n # set chart data\n data = year_data['count']\n data.index = year_data.index \n \n # plot data\n figsize = plt.figaspect(7 / 56)\n fig = plt.figure(figsize=figsize)\n ax = date_heatmap(data, edgecolor='black')\n max_count = int(round(data.max(),0))\n steps = int(round(max_count / 6, 0))\n plt.colorbar(ticks=range(0, max_count, steps), pad=0.02)\n cmap = mpl.cm.get_cmap('Purples', max_daily_count)\n plt.set_cmap(cmap)\n plt.clim(0, max_daily_count)\n ax.set_aspect('equal')\n ax.set_title(chart_title)\n plt.show()", "_____no_output_____" ], [ 
"year_heat_chart(year=target_year)", "_____no_output_____" ], [ "# compare previous year: \nyear_heat_chart(year=2017)", "_____no_output_____" ] ], [ [ "### Yearly Summary", "_____no_output_____" ] ], [ [ "def yearly_summary(year): \n print('====== {} Todoist Summary ======'.format(year))\n # Data Setup\n year_data = tasks[(tasks['year'] == year)]\n \n print('Total Tasks Completed: {:,}'.format(len(year_data)))\n \n daily_counts = year_data['date'].value_counts().sort_index()\n \n daily_average = round(daily_counts.mean(),1)\n print('Daily Task Average: {:,}'.format(daily_average))\n print(' ')\n project_counts = year_data['project_name'].value_counts()\n print('=== Top Projects ===')\n for i, v in project_counts.sort_values(ascending=False).head(7).items():\n print(\"* \", v, 'tasks on ', i)\n \n print(' ')\n print('=== Monthly Breakdown ===')\n monthly_counts = year_data['month'].value_counts().sort_index()\n print('Monthly Task Average: {:,}'.format(round(monthly_counts.mean(),1)))\n print('> Top 3 Months:')\n for i, v in monthly_counts.sort_values(ascending=False).head(3).items():\n print(\"* \", v, 'tasks on ', i)\n print('> Bottom 3 Months:')\n for i, v in monthly_counts.sort_values(ascending=True).head(3).items():\n print(\"* \", v, 'tasks on ', i) \n print(' ')\n print('Top 5 Days with Most Tasks Completed:')\n for i, v in daily_counts.sort_values(ascending=False).head(5).items():\n print(\"* \", v, 'tasks on ', i)", "_____no_output_____" ], [ "yearly_summary(year=target_year)", "====== 2018 Todoist Summary ======\nTotal Tasks Completed: 2,212\nDaily Task Average: 6.4\n \n=== Top Projects ===\n* 395 tasks on Data-Driven You\n* 284 tasks on Code Studies\n* 238 tasks on BookLoversCon\n* 187 tasks on Productivity, Self-Tracking\n* 146 tasks on Studies: General\n* 127 tasks on Writing\n* 107 tasks on Networking / Career\n \n=== Monthly Breakdown ===\nMonthly Task Average: 184.3\n> Top 3 Months:\n* 237 tasks on 2018-03\n* 224 tasks on 2018-04\n* 217 tasks on 2018-07\n> Bottom 3 Months:\n* 131 tasks on 2018-12\n* 138 tasks on 2018-08\n* 148 tasks on 2018-11\n \nTop 5 Days with Most Tasks Completed:\n* 14 tasks on 2018-03-19\n* 12 tasks on 2018-03-20\n* 12 tasks on 2018-03-26\n* 12 tasks on 2018-01-10\n* 12 tasks on 2018-05-17\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
cbfd9a292aa6403a75a3f42ee2f49cf4d28af6a0
4,287
ipynb
Jupyter Notebook
Homework1/Exercise1.ipynb
clementpoiret/PH526x-Harvard-Python-Research
214ad883a656ef57eea63b5bc250afc96ec9b83a
[ "Apache-2.0" ]
2
2019-11-02T21:32:29.000Z
2020-07-14T03:09:42.000Z
Homework1/Exercise1.ipynb
clementpoiret/PH526x-Harvard-Python-Research
214ad883a656ef57eea63b5bc250afc96ec9b83a
[ "Apache-2.0" ]
null
null
null
Homework1/Exercise1.ipynb
clementpoiret/PH526x-Harvard-Python-Research
214ad883a656ef57eea63b5bc250afc96ec9b83a
[ "Apache-2.0" ]
6
2019-08-31T15:54:39.000Z
2020-12-17T01:22:42.000Z
23.048387
274
0.411943
[ [ [ "import string\n\nalphabet = string.ascii_letters", "_____no_output_____" ], [ "sentence = 'Jim quickly realized that the beautiful gowns are expensive'\n\ncount_letters = {}\nfor letter in sentence: \n if letter in alphabet: \n if letter in count_letters:\n count_letters[letter] += 1\n else:\n count_letters[letter] = 1\n\ncount_letters", "_____no_output_____" ], [ "def counter(input_string):\n count_letters = {}\n for letter in input_string: \n if letter in alphabet: \n if letter in count_letters:\n count_letters[letter] += 1\n else:\n count_letters[letter] = 1\n return count_letters\n\ncounter(sentence)", "_____no_output_____" ], [ "address_count = {}\n\nwith open('address.txt') as f:\n address = f.readlines()\n \naddress = str([x.strip() for x in address])\n\naddress_count = counter(address)\n\nprint(address_count)", "{'F': 1, 'o': 93, 'u': 21, 'r': 80, 's': 44, 'c': 31, 'e': 167, 'a': 102, 'n': 76, 'd': 58, 'v': 24, 'y': 10, 'g': 27, 'f': 26, 't': 124, 'h': 81, 'b': 13, 'i': 65, 'w': 26, 'l': 42, 'p': 15, 'm': 13, 'q': 1, 'N': 1, 'W': 2, 'I': 3, 'B': 1, 'T': 2, 'k': 3, 'G': 1}\n" ], [ "position = 0\nmost_frequent_letter = str()\nmost_frequent = 0\n\nwhile position < len(alphabet):\n \n if alphabet[position] in address_count:\n number = address_count[alphabet[position]]\n if number > most_frequent:\n most_frequent = number\n position += 1\n else:\n position += 1\n else:\n number = 0\n position += 1\n \nfor letter, count in address_count.items():\n if count == most_frequent:\n most_frequent_letter = letter\n \nprint (most_frequent_letter)", "e\n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code" ] ]
cbfd9b8b7126d8ad08fe317319c9357df40cbc43
12,457
ipynb
Jupyter Notebook
docs/source/estimator_intro.ipynb
beckernick/cuml
36b4c7fe2bb98d12bec12a22781c5ad0a7f0d964
[ "Apache-2.0" ]
null
null
null
docs/source/estimator_intro.ipynb
beckernick/cuml
36b4c7fe2bb98d12bec12a22781c5ad0a7f0d964
[ "Apache-2.0" ]
null
null
null
docs/source/estimator_intro.ipynb
beckernick/cuml
36b4c7fe2bb98d12bec12a22781c5ad0a7f0d964
[ "Apache-2.0" ]
null
null
null
43.708772
427
0.640764
[ [ [ "# Training and Evaluating Machine Learning Models in cuML\n\nThis notebook explores several basic machine learning estimators in cuML, demonstrating how to train them and evaluate them with built-in metrics functions. All of the models are trained on synthetic data, generated by cuML's dataset utilities.\n\n1. Random Forest Classifier\n2. UMAP\n3. DBSCAN\n4. Linear Regression\n\n\n[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/rapidsai/cuml/blob/tree/branch-0.14/docs/source/estimator_intro.ipynb)", "_____no_output_____" ], [ "## Classification", "_____no_output_____" ], [ "### Random Forest Classification and Accuracy metrics\n\nThe Random Forest algorithm classification model builds several decision trees, and aggregates each of their outputs to make a prediction. For more information on cuML's implementation of the Random Forest Classification model please refer to : \nhttps://docs.rapids.ai/api/cuml/stable/api.html#cuml.ensemble.RandomForestClassifier\n\nAccuracy score is the ratio of correct predictions to the total number of predictions. It is used to measure the performance of classification models. \nFor more information on the accuracy score metric please refer to: https://en.wikipedia.org/wiki/Accuracy_and_precision\n\nFor more information on cuML's implementation of accuracy score metrics please refer to: https://docs.rapids.ai/api/cuml/stable/api.html#cuml.metrics.accuracy.accuracy_score\n\nThe cell below shows an end to end pipeline of the Random Forest Classification model. Here the dataset was generated by using sklearn's make_blobs dataset. The generated dataset was used to train and run predict on the model. Random forest's performance is evaluated and then compared between the values obtained from the cuML and sklearn accuracy metrics.", "_____no_output_____" ] ], [ [ "import cuml\nimport cupy as cp\nimport numpy as np\n\nfrom cuml.datasets import make_blobs\nfrom cuml.ensemble import RandomForestClassifier as curfc\nfrom cuml.preprocessing.model_selection import train_test_split\n\nfrom sklearn.metrics import accuracy_score\n\nn_samples = 1000\nn_features = 10\nn_info = 7\n\nX_blobs, y_blobs = make_blobs(n_samples=n_samples, cluster_std=0.1,\n n_features=n_features, random_state=0,\n dtype=np.float32)\n\nX_blobs_train, X_blobs_test, y_blobs_train, y_blobs_test = train_test_split(X_blobs,\n y_blobs, train_size=0.8,\n random_state=10)\n\ncuml_class_model = curfc(max_features=1.0, n_bins=8, max_depth=10,\n split_algo=0, min_rows_per_node=2,\n n_estimators=30)\ncuml_class_model.fit(X_blobs_train, y_blobs_train)\ncu_preds = cuml_class_model.predict(X_blobs_test)\n\ncu_accuracy = cuml.metrics.accuracy_score(y_blobs_test, cu_preds)\n\n# convert cupy test labels to numpy since sklearn's accuracy_score function\n# does not accept cupy input\ny_blobs_test = cp.asnumpy(y_blobs_test)\nsk_accuracy = accuracy_score(y_blobs_test, cp.asnumpy(cu_preds))\n\nprint(\"cuml's accuracy score : \", cu_accuracy)\nprint(\"sklearn's accuracy score : \", sk_accuracy)", "_____no_output_____" ] ], [ [ "## Clustering", "_____no_output_____" ], [ "### UMAP and Trustworthiness metrics\nUMAP is a dimensionality reduction algorithm which performs non-linear dimension reduction. 
It can also be used for visualization.\nFor additional information on the UMAP model please refer to the documentation on https://docs.rapids.ai/api/cuml/stable/api.html#cuml.UMAP\n\nTrustworthiness is a measure of the extent to which the local structure is retained in the embedding of the model. Therefore, if a sample predicted by the model lied within the unexpected region of the nearest neighbors, then those samples would be penalized. For more information on the trustworthiness metric please refer to: https://scikit-learn.org/dev/modules/generated/sklearn.manifold.t_sne.trustworthiness.html\n\nthe documentation for cuML's implementation of the trustworthiness metric is: https://docs.rapids.ai/api/cuml/stable/api.html#cuml.metrics.trustworthiness.trustworthiness\n\nThe cell below shows an end to end pipeline of UMAP model. Here, the blobs dataset is created by cuml's equivalent of make_blobs function to be used as the input. The output of UMAP's fit_transform is evaluated using the trustworthiness function. The values obtained by sklearn and cuml's trustworthiness are compared below.\n", "_____no_output_____" ] ], [ [ "import cuml\nimport cupy as cp\nimport numpy as np\n\nfrom cuml.datasets import make_blobs\nfrom cuml.manifold.umap import UMAP as cuUMAP\n\nfrom sklearn.manifold import trustworthiness\n\n# Generate a datasets with 8 \"blobs\" of grouped-together points so we have an interesting structure to test DBSCAN clustering and UMAP\n\nn_samples = 2**10\nn_features = 100\n\ncenters = round(n_samples*0.4)\nX_blobs, y_blobs = make_blobs(n_samples=n_samples, cluster_std=0.1,\n n_features=n_features, random_state=0,\n dtype=np.float32)\n\nX_embedded = cuUMAP(n_neighbors=10).fit_transform(X_blobs)\n\ncu_score = cuml.metrics.trustworthiness(X_blobs, X_embedded)\n\n# convert cupy test labels to numpy since sklearn's trustworthiness function\n# does not accept cupy input\nX_blobs = cp.asnumpy(X_blobs)\nsk_score = trustworthiness(X_blobs, cp.asnumpy(X_embedded))\n\nprint(\" cuml's trustworthiness score : \", cu_score)\nprint(\" sklearn's trustworthiness score : \", sk_score)", "_____no_output_____" ] ], [ [ "### DBSCAN and Adjusted Random Index\nDBSCAN is a popular and a powerful clustering algorithm. For additional information on the DBSCAN model please refer to the documentation on https://docs.rapids.ai/api/cuml/stable/api.html#cuml.DBSCAN\n\nWe create the blobs dataset using the cuml equivalent of make_blobs function.\n\nAdjusted random index is a metric which is used to measure the similarity between two data clusters, and it is adjusted to take into consideration the chance grouping of elements.\nFor more information on Adjusted random index please refer to: https://en.wikipedia.org/wiki/Rand_index\n\nThe cell below shows an end to end model of DBSCAN. The output of DBSCAN's fit_predict is evaluated using the Adjusted Random Index function. 
The values obtained by sklearn and cuml's adjusted random metric are compared below.", "_____no_output_____" ] ], [ [ "import numpy as np\nimport cuml\n\nfrom cuml.datasets import make_blobs\nfrom cuml import DBSCAN as cumlDBSCAN\n\nfrom sklearn.metrics import adjusted_rand_score\n\nn_samples = 2**10\nn_features = 100\n\ncenters = round(n_samples*0.4)\nX_blobs, y_blobs = make_blobs(n_samples=n_samples, cluster_std=0.01,\n n_features=n_features, random_state=0,\n dtype=np.float32)\n\ncuml_dbscan = cumlDBSCAN(eps=3, min_samples=2)\ncu_y_pred = cuml_dbscan.fit_predict(X_blobs)\n\ncu_adjusted_rand_index = cuml.metrics.cluster.adjusted_rand_score(y_blobs, cu_y_pred)\n\n# convert cupy test labels to numpy since sklearn's adjusted_rand_score function\n# does not accept cupy input\ny_blobs = cp.asnumpy(y_blobs)\nsk_adjusted_rand_index = adjusted_rand_score(y_blobs, cp.asnumpy(cu_y_pred))\n\nprint(\" cuml's adjusted random index score : \", cu_adjusted_rand_index)\nprint(\" sklearn's adjusted random index score : \", sk_adjusted_rand_index)", "_____no_output_____" ] ], [ [ "## Regression", "_____no_output_____" ], [ "### Linear regression and R^2 score\nLinear Regression is a simple machine learning model where the response y is modelled by a linear combination of the predictors in X.\n\nR^2 score is also known as the coefficient of determination. It is used as a metric for scoring regression models. It scores the output of the model based on the proportion of total variation of the model.\nFor more information on the R^2 score metrics please refer to: https://en.wikipedia.org/wiki/Coefficient_of_determination\n\nFor more information on cuML's implementation of the r2 score metrics please refer to : https://docs.rapids.ai/api/cuml/stable/api.html#cuml.metrics.regression.r2_score\n\nThe cell below uses the Linear Regression model to compare the results between cuML and sklearn trustworthiness metric. For more information on cuML's implementation of the Linear Regression model please refer to : \nhttps://docs.rapids.ai/api/cuml/stable/api.html#linear-regression", "_____no_output_____" ] ], [ [ "import numpy as np\nimport cuml\n\nfrom cuml.datasets import make_regression\nfrom cuml.linear_model import LinearRegression as culr\nfrom cuml.preprocessing.model_selection import train_test_split\n\nfrom sklearn.metrics import r2_score\n\nn_samples = 2**10\nn_features = 100\nn_info = 70\n\nX_reg, y_reg = make_regression(n_samples=n_samples, n_features=n_features,\n n_informative=n_info, random_state=123, dtype=np.float32)\n\n# using cuML's train_test_split function to divide the dataset into training and testing splits\nX_reg_train, X_reg_test, y_reg_train, y_reg_test = train_test_split(X_reg,\n y_reg, train_size=0.8,\n random_state=10)\ncuml_reg_model = culr(fit_intercept=True,\n normalize=True,\n algorithm='eig')\ncuml_reg_model.fit(X_reg_train,y_reg_train)\ncu_preds = cuml_reg_model.predict(X_reg_test)\n\ncu_r2 = cuml.metrics.r2_score(y_reg_test, cu_preds)\nsk_r2 = r2_score(y_reg_test, cu_preds)\n\nprint(\"cuml's r2 score : \", cu_r2)\nprint(\"sklearn's r2 score : \", sk_r2)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ] ]
cbfda4fbba6fb3256cf2fed7545845ce201ca04b
366,663
ipynb
Jupyter Notebook
Analysis.ipynb
jbn/who_follows_james_damore
a4da2877af899c5cd329308cef8f43f8779d2fda
[ "MIT" ]
null
null
null
Analysis.ipynb
jbn/who_follows_james_damore
a4da2877af899c5cd329308cef8f43f8779d2fda
[ "MIT" ]
null
null
null
Analysis.ipynb
jbn/who_follows_james_damore
a4da2877af899c5cd329308cef8f43f8779d2fda
[ "MIT" ]
null
null
null
670.316271
79,402
0.928237
[ [ [ "%matplotlib inline\n\nimport json\nimport os\nimport matplotlib.pyplot as plt\nimport pandas as pd\nimport seaborn as sns\n\nfrom functools import reduce\nfrom matplotlib.ticker import FuncFormatter\n\n\nDAMORE = '@Fired4Truth'\nDATA_DIR = os.path.join(\"data\", \"clean\")\n\nsns.set_palette(sns.xkcd_palette([\"windows blue\", \"amber\", \"greyish\"]))\nNORMAL, HIGHLIGHT, TEXT = sns.color_palette()\nTHOUSANDS_FORMATTER = FuncFormatter(lambda x, p: format(int(x), ','))\nPERCENT_FORMATTER = FuncFormatter(lambda x, p: \"{:3.0f}%\".format(x*100))\n\nsns.set_context('poster')\nsns.set_style('white')", "_____no_output_____" ], [ "def mutual_followers(followers, ego, *screen_names):\n return reduce(lambda acc, k: followers[k] & acc, \n screen_names, \n followers[ego])\n\ntest_followers = {'a': {1, 2, 3}, 'b': {2, 3, 4}, 'c': {3, 4, 5}}\nassert mutual_followers(test_followers, 'a') == {1, 2, 3}\nassert mutual_followers(test_followers, 'a', 'b') == {2, 3}\nassert mutual_followers(test_followers, 'a', 'b', 'c') == {3}\n\n\ndef load_followers(data_dir=DATA_DIR):\n followers = {}\n\n for name in os.listdir(DATA_DIR):\n if name.endswith(\".json\"):\n with open(os.path.join(DATA_DIR, name)) as fp:\n screen_name = '@' + name.replace(\".json\", \"\")\n followers[screen_name] = set(json.load(fp))\n \n return followers\n\n\ndef create_annotated_df(followers, target=DAMORE, others=None):\n others = others or []\n followers = load_followers()\n \n n_followers = {k: len(mutual_followers(followers, k, *others))\n for k in followers}\n df = pd.DataFrame({'n_followers': pd.Series(n_followers)})\n \n df['mutual_followers'] = [len(mutual_followers(followers, target, k, *others)) \n for k in df.index] \n\n df['mutual_proportion'] = df['mutual_followers'] / n_followers[target]\n \n return df\n\n\ndef sort_and_highlight(df, title, k, highlight_set, formatter):\n fig, ax = plt.subplots()\n\n df = df.sort_values(by=k)\n\n colors = [HIGHLIGHT if k in highlight_set else NORMAL \n for k in df.index]\n\n # Cheap hack. 
Couldn't get pandas do multiple colors.\n df[k].plot(kind='barh')\n plt.barh(range(df.shape[0]), \n df[k], \n height=0.6, \n color=colors)\n\n plt.title(title)\n ax.xaxis.set_major_formatter(formatter)\n sns.despine()\n return fig, ax", "_____no_output_____" ], [ "followers = load_followers()\ndf = create_annotated_df(followers)\ndf", "_____no_output_____" ], [ "sort_and_highlight(df, \n 'Total Twitter Followers', \n 'n_followers', \n {DAMORE}, \n THOUSANDS_FORMATTER);", "_____no_output_____" ], [ "sort_and_highlight(df, \n 'P[ Follows @ScreenName | Follows James Damore ]', \n 'mutual_proportion', \n {DAMORE}, PERCENT_FORMATTER);", "_____no_output_____" ], [ "followers = load_followers()\nsub_df = create_annotated_df(followers, target='@Fired4Truth', others=['@TechCrunch'])\nsub_df", "_____no_output_____" ], [ "sort_and_highlight(sub_df, \n 'P[ Follows @ScreenName | Follows Damore, Follows TechCrunch ]', \n 'mutual_proportion', \n {DAMORE, '@TechCrunch'}, \n PERCENT_FORMATTER);", "_____no_output_____" ], [ "fig, ax = sort_and_highlight(sub_df, \n 'P[ Follows @ScreenName | Follows James Damore, Follows TechCrunch ]', \n 'mutual_proportion', \n {'@travisk', '@sherylsandberg'}, \n PERCENT_FORMATTER)\n\nax.annotate('206,588 followers', xy=(0.15, 4), xytext=(0.35, 4),\n verticalalignment='center',\n arrowprops=dict(facecolor=TEXT, edgecolor=TEXT, shrink=0.05))\n\n\nax.annotate('242,680 followers', xy=(0.07, 1), xytext=(0.35, 1),\n verticalalignment='center',\n arrowprops=dict(facecolor=TEXT, edgecolor=TEXT, shrink=0.05));", "_____no_output_____" ], [ "fig, ax = sort_and_highlight(sub_df, \n 'P[ Follows @ScreenName | Follows James Damore, Follows TechCrunch ]', \n 'mutual_proportion', \n {'@marissamayer', '@PrisonPlanet'}, \n PERCENT_FORMATTER)\n\nax.annotate(' 681,727 followers', xy=(0.47, 10), xytext=(0.65, 10),\n verticalalignment='center',\n arrowprops=dict(facecolor=TEXT, edgecolor=TEXT, shrink=0.05))\n\n\nax.annotate('1,711,976 followers', xy=(0.2, 7), xytext=(0.65, 7),\n verticalalignment='center',\n arrowprops=dict(facecolor=TEXT, edgecolor=TEXT, shrink=0.05));", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbfdb20ec24aa0bf8c1bf864fc9dccd915d89cdb
1,370
ipynb
Jupyter Notebook
gs_quant/documentation/06_baskets/examples/02_basket_pricing_data/0002_get_basket_close_price_for_dates.ipynb
webclinic017/gs-quant
ebb8ee5e1d954ab362aa567293906ce51818cfa8
[ "Apache-2.0" ]
4
2021-05-11T14:35:53.000Z
2022-03-14T03:52:34.000Z
gs_quant/documentation/06_baskets/examples/02_basket_pricing_data/0002_get_basket_close_price_for_dates.ipynb
webclinic017/gs-quant
ebb8ee5e1d954ab362aa567293906ce51818cfa8
[ "Apache-2.0" ]
null
null
null
gs_quant/documentation/06_baskets/examples/02_basket_pricing_data/0002_get_basket_close_price_for_dates.ipynb
webclinic017/gs-quant
ebb8ee5e1d954ab362aa567293906ce51818cfa8
[ "Apache-2.0" ]
null
null
null
20.447761
89
0.556204
[ [ [ "import datetime as dt\nfrom gs_quant.markets.baskets import Basket\nfrom gs_quant.session import Environment, GsSession", "_____no_output_____" ], [ "client = 'CLIENT ID'\nsecret = 'CLIENT SECRET'\n\nGsSession.use(Environment.PROD, client_id=client, client_secret=secret)", "_____no_output_____" ], [ "basket = Basket.get('GSMBXXXX') # substitute input with any identifier for a basket", "_____no_output_____" ], [ "basket.get_close_prices(dt.date(2021, 1, 7), dt.date(2021, 3, 27))", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code" ] ]
cbfdd30c71f3fe4e0400a8803205b06ed1c586c2
349,080
ipynb
Jupyter Notebook
experiments/job.simulation/make_figure.null_simulation.ipynb
martinjzhang/scDRS
69a9fb4e50dbfa6b1afe0dd222b0d349c5db00eb
[ "MIT" ]
24
2021-09-30T12:31:58.000Z
2022-03-28T01:14:39.000Z
experiments/job.simulation/make_figure.null_simulation.ipynb
martinjzhang/scDRS
69a9fb4e50dbfa6b1afe0dd222b0d349c5db00eb
[ "MIT" ]
5
2021-09-29T11:20:37.000Z
2022-03-06T21:53:08.000Z
experiments/job.simulation/make_figure.null_simulation.ipynb
martinjzhang/scDRS
69a9fb4e50dbfa6b1afe0dd222b0d349c5db00eb
[ "MIT" ]
null
null
null
639.340659
28,220
0.946557
[ [ [ "import scanpy as sc\nimport pandas as pd\nimport numpy as np\nimport scipy as sp\nfrom statsmodels.stats.multitest import multipletests\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nimport os\nfrom os.path import join\nimport time\n\nplt.rcParams['pdf.fonttype'] = 42\nplt.rcParams['ps.fonttype'] = 42\n\n# scTRS tools\nimport scdrs.util as util\nimport scdrs.data_loader as dl\nimport scdrs.method as md\n\n# autoreload\n%load_ext autoreload\n%autoreload 2", "_____no_output_____" ], [ "# Constants \nDATA_PATH='/n/holystore01/LABS/price_lab/Users/mjzhang/scDRS_data'\nOUT_PATH=DATA_PATH+'/results/fig_simu'\n\n# GS \nGS_LIST = ['%s_ngene%d'%(prefix, size) for prefix in ['all', 'highmean', 'highvar', 'highbvar']\n for size in [100, 500, 1000]]\ntemp_dic = {'all': 'random genes', 'highmean': 'random high mean-expr genes',\n 'highvar': 'random high variance genes', 'highbvar': 'random overdispersed genes'}\nDIC_GS_NAME = {x:x.split('_')[1].replace('ngene','')+' '+temp_dic[x.split('_')[0]] \n for x in GS_LIST}\n\n# DATA_LIST\nDATA_LIST = ['tms_facs.ncell_10k']\n\n# Results \nDIC_RES_PATH = {'sctrs': DATA_PATH+'/simulation_data/score_file/@d.@g',\n 'seurat': DATA_PATH+'/simulation_data/score_file/result_scanpy/@d.@g',\n 'vision': DATA_PATH+'/simulation_data/score_file/result_vision/@d.@g',\n 'vam': DATA_PATH+'/simulation_data/score_file/result_vam/@[email protected]'}\n\nMETHOD_LIST = list(DIC_RES_PATH.keys())\nDIC_METHOD_NAME = {'sctrs':'scDRS', 'seurat': 'Seurat', 'vision':'Vision', 'vam':'VAM'}\nDIC_METHOD_COLOR = {'sctrs':'C0', 'seurat': 'C1', 'vision':'C2', 'vam':'C3'}\n\nfor method in METHOD_LIST:\n if method not in DIC_METHOD_NAME.keys():\n DIC_METHOD_NAME[method] = method\n if method not in DIC_METHOD_COLOR.keys():\n DIC_METHOD_COLOR[method] = 'C%d'%len(DIC_METHOD_COLOR)", "_____no_output_____" ], [ "# Read results \nimport itertools\n\nq_list = 10**np.linspace(-3,0,30)\ndic_res = {}\n\nfor gs,dname,method in itertools.product(GS_LIST, DATA_LIST, METHOD_LIST):\n print(gs,dname,method)\n \n df_gs = pd.read_csv(DATA_PATH+'/simulation_data/gs_file/%s.gs'%gs, sep='\\t', index_col=0)\n df_res = pd.DataFrame(index=df_gs.index, columns=q_list, data=-1)\n\n # load scTRS results \n if method=='sctrs':\n for trait in df_gs.index:\n score_file = DIC_RES_PATH[method].replace('@d',dname).replace('@g',gs) + '/%s.score.gz'%trait\n if os.path.exists(score_file):\n temp_df = pd.read_csv(score_file, sep='\\t')\n df_res.loc[trait, q_list] = np.quantile(temp_df['pval'], q_list)\n else:\n print('# file missing: ', score_file)\n dic_res['%s:%s:%s'%(dname,gs,method)] = df_res.copy()\n \n # load vam results \n if method=='vam':\n score_file = DIC_RES_PATH[method].replace('@d',dname).replace('@g',gs) \n if os.path.exists(score_file):\n temp_df = pd.read_csv(score_file, sep='\\t')\n temp_df.columns = [x.replace('.','_') for x in temp_df.columns]\n drop_list = temp_df.columns[temp_df.mean(axis=0)>0.99]\n for trait in df_gs.index:\n if trait in drop_list: \n print('# %s dropped'%trait)\n continue\n df_res.loc[trait, q_list] = np.quantile(temp_df[trait], q_list)\n df_res = df_res.loc[(df_res==-1).sum(axis=1)==0]\n dic_res['%s:%s:%s'%(dname,gs,method)] = df_res.copy()\n else:\n print('# file missing: ', score_file)\n \n # load vision result \n if method=='vision':\n for trait in df_gs.index:\n score_file = DIC_RES_PATH[method].replace('@d',dname).replace('@g',gs) + '/%s.score.gz'%trait\n if os.path.exists(score_file):\n temp_df = pd.read_csv(score_file, sep='\\t')\n df_res.loc[trait, q_list] = 
np.quantile(temp_df['norm_pval'], q_list)\n else:\n print('# file missing: ', score_file)\n dic_res['%s:%s:%s'%(dname,gs,method)] = df_res.copy()\n \n # load seurat results\n if method=='seurat':\n for trait in df_gs.index:\n score_file = DIC_RES_PATH[method].replace('@d',dname).replace('@g',gs) + '/%s.score.gz'%trait\n if os.path.exists(score_file):\n temp_df = pd.read_csv(score_file, sep='\\t')\n df_res.loc[trait, q_list] = np.quantile(temp_df['pval'], q_list)\n else:\n print('# file missing: ', score_file)\n dic_res['%s:%s:%s'%(dname,gs,method)] = df_res.copy()", "all_ngene100 tms_facs.ncell_10k sctrs\nall_ngene100 tms_facs.ncell_10k seurat\nall_ngene100 tms_facs.ncell_10k vision\nall_ngene100 tms_facs.ncell_10k vam\nall_ngene500 tms_facs.ncell_10k sctrs\nall_ngene500 tms_facs.ncell_10k seurat\nall_ngene500 tms_facs.ncell_10k vision\nall_ngene500 tms_facs.ncell_10k vam\nall_ngene1000 tms_facs.ncell_10k sctrs\nall_ngene1000 tms_facs.ncell_10k seurat\nall_ngene1000 tms_facs.ncell_10k vision\nall_ngene1000 tms_facs.ncell_10k vam\nhighmean_ngene100 tms_facs.ncell_10k sctrs\nhighmean_ngene100 tms_facs.ncell_10k seurat\nhighmean_ngene100 tms_facs.ncell_10k vision\nhighmean_ngene100 tms_facs.ncell_10k vam\nhighmean_ngene500 tms_facs.ncell_10k sctrs\nhighmean_ngene500 tms_facs.ncell_10k seurat\nhighmean_ngene500 tms_facs.ncell_10k vision\nhighmean_ngene500 tms_facs.ncell_10k vam\nhighmean_ngene1000 tms_facs.ncell_10k sctrs\nhighmean_ngene1000 tms_facs.ncell_10k seurat\nhighmean_ngene1000 tms_facs.ncell_10k vision\nhighmean_ngene1000 tms_facs.ncell_10k vam\nhighvar_ngene100 tms_facs.ncell_10k sctrs\nhighvar_ngene100 tms_facs.ncell_10k seurat\nhighvar_ngene100 tms_facs.ncell_10k vision\nhighvar_ngene100 tms_facs.ncell_10k vam\n# highvar_ngene100_rep16 dropped\n# highvar_ngene100_rep59 dropped\nhighvar_ngene500 tms_facs.ncell_10k sctrs\nhighvar_ngene500 tms_facs.ncell_10k seurat\nhighvar_ngene500 tms_facs.ncell_10k vision\nhighvar_ngene500 tms_facs.ncell_10k vam\nhighvar_ngene1000 tms_facs.ncell_10k sctrs\nhighvar_ngene1000 tms_facs.ncell_10k seurat\nhighvar_ngene1000 tms_facs.ncell_10k vision\nhighvar_ngene1000 tms_facs.ncell_10k vam\nhighbvar_ngene100 tms_facs.ncell_10k sctrs\nhighbvar_ngene100 tms_facs.ncell_10k seurat\nhighbvar_ngene100 tms_facs.ncell_10k vision\nhighbvar_ngene100 tms_facs.ncell_10k vam\nhighbvar_ngene500 tms_facs.ncell_10k sctrs\nhighbvar_ngene500 tms_facs.ncell_10k seurat\nhighbvar_ngene500 tms_facs.ncell_10k vision\nhighbvar_ngene500 tms_facs.ncell_10k vam\nhighbvar_ngene1000 tms_facs.ncell_10k sctrs\nhighbvar_ngene1000 tms_facs.ncell_10k seurat\nhighbvar_ngene1000 tms_facs.ncell_10k vision\nhighbvar_ngene1000 tms_facs.ncell_10k vam\n" ], [ "# Q-Q plot\ndname = 'tms_facs.ncell_10k'\nplot_list = ['%s:%s'%(dname, x) for x in GS_LIST]\nplot_method_list = ['sctrs', 'vision', 'seurat', 'vam']\n\nfor plot_name in plot_list:\n dname,gs=plot_name.split(':')\n df_plot_mean = pd.DataFrame(index=q_list, columns=plot_method_list, data=-1)\n df_plot_se = pd.DataFrame(index=q_list, columns=plot_method_list, data=-1)\n \n for method in plot_method_list:\n res = '%s:%s'%(plot_name,method)\n temp_df = dic_res[res][q_list].loc[(dic_res[res][q_list]==-1).sum(axis=1)==0]\n \n df_plot_mean.loc[q_list, method] = temp_df.mean(axis=0)\n df_plot_se.loc[q_list, method] = temp_df.std(axis=0)/np.sqrt(temp_df.shape[0])\n \n df_plot_mean = df_plot_mean.clip(lower=1e-4)\n df_plot_se = df_plot_se.clip(lower=1e-10) \n \n # Compute distance and p-value\n df_plot_dist = 
np.absolute(np.log10(df_plot_mean.T)-np.log10(df_plot_mean.index)).T\n df_plot_dist.drop(1, axis=0, inplace=True)\n df_plot_dist = df_plot_dist.max(axis=0)\n \n temp_df = np.absolute(df_plot_mean.T-df_plot_mean.index).T / df_plot_se\n df_plot_p = pd.DataFrame(index=df_plot_mean.index, columns=df_plot_mean.columns, \n data=(1-sp.stats.norm.cdf(temp_df))*2)\n df_plot_p.drop(1, axis=0, inplace=True)\n df_plot_p = df_plot_p.median(axis=0)\n \n # Plot \n plt.figure(figsize=[4.2,4])\n df_plot_logerr = np.log10(df_plot_mean+1.96*df_plot_se) - np.log10(df_plot_mean)\n for i_method,method in enumerate(plot_method_list):\n plt.errorbar(-np.log10(df_plot_mean.index), -np.log10(df_plot_mean[method]), \n yerr = df_plot_logerr[method], label=DIC_METHOD_NAME[method],\n fmt='.', markersize=4, elinewidth=1, color=DIC_METHOD_COLOR[method], zorder=8-i_method)\n\n plt.plot([0, 3], [0, 3], linestyle='--', linewidth=1, color='k', zorder=0)\n plt.xlabel('Theoretical -log10(p) quantiles')\n plt.ylabel('Actual -log10(p) quantiles')\n plt.yticks([0,0.5,1,1.5,2,2.5,3,3.5,4],[0,0.5,1,1.5,2,2.5,3,3.5,'>4'])\n plt.grid(linestyle='--', linewidth=0.5)\n if 'all' in gs:\n plt.title('Null simulations (%s)'%DIC_GS_NAME[gs])\n else:\n plt.title('Null simulations\\n(%s)'%DIC_GS_NAME[gs])\n plt.legend()\n plt.tight_layout()\n plt.savefig(OUT_PATH+'/%s.%s.svg'%(dname,gs))\n plt.show()\n \n # Store data for the main figure 'tms_facs.ncell_10k:all_ngene1000'\n if plot_name=='tms_facs.ncell_10k:all_ngene1000':\n SUPP_TAB_PATH='/n/holystore01/LABS/price_lab/Users/mjzhang/scDRS_data/supp_table'\n df_plot_mean.columns = ['%s.mean'%x for x in df_plot_mean]\n df_plot_se.columns = ['%s.se'%x for x in df_plot_se]\n df_out = df_plot_mean.join(df_plot_se)\n df_out.index.name='quantile'\n df_out = df_out[['%s.%s'%(x,y) for x in plot_method_list for y in ['mean', 'se']]]\n df_out.to_csv(SUPP_TAB_PATH+'/supp_tab_fig2a.tsv', sep='\\t')", "_____no_output_____" ], [ "for method in plot_method_list:\n print(method, (np.log10(df_out['%s.mean'%method]+1.96*df_out['%s.se'%method])\n - np.log10(df_out['%s.mean'%method])).max())", "sctrs 0.03840265567895207\nvision 0.039289529620924135\nseurat 0.03902260206418218\nvam 0.0428203965511309\n" ] ], [ [ "### Cell type-disease association ", "_____no_output_____" ] ], [ [ "# Load single-cell data\nadata = sc.read_h5ad(DATA_PATH+'/simulation_data/single_cell_data/tms_facs.ncell_10k.h5ad')\n\n# Read full score\ndic_res_full = {}\nscore_file=DATA_PATH+'/simulation_data/score_file/tms_facs.ncell_10k.all_ngene1000'\ndf_gs = pd.read_csv(DATA_PATH+'/simulation_data/gs_file/all_ngene1000.gs', sep='\\t', index_col=0)\nfor trait in df_gs.index:\n if os.path.exists(score_file+'/%s.full_score.gz'%trait):\n dic_res_full[trait] = pd.read_csv(score_file+'/%s.full_score.gz'%trait, sep='\\t', index_col=0)\n else:\n print('# file missing: ', score_file)", "_____no_output_____" ], [ "# Cell type-disease association \ncelltype_list = sorted(set(adata.obs['cell_ontology_class']))\ntrait_list = list(df_gs.index)\ndf_stats = pd.DataFrame(index=celltype_list, columns=trait_list, dtype=float)\nfor trait in trait_list:\n for ct in celltype_list:\n cell_list = adata.obs_names[adata.obs['cell_ontology_class']==ct]\n temp_df = dic_res_full[trait].loc[cell_list].copy()\n score_q95 = np.quantile(temp_df['norm_score'], 0.95)\n temp_df = temp_df[[x for x in temp_df.columns if x.startswith('ctrl_norm_score')]]\n v_ctrl_score_q95 = np.quantile(temp_df, 0.95, axis=0)\n df_stats.loc[ct,trait] = ((v_ctrl_score_q95>=score_q95).sum()+1) / 
(v_ctrl_score_q95.shape[0]+1)", "_____no_output_____" ], [ "df_stats_fdr = df_stats.copy()\nprint('# n_celltype=%d, n_rep=%d'%df_stats_fdr.shape)\nfor col in df_stats_fdr:\n df_stats_fdr[col] = multipletests(df_stats[col], method='fdr_bh')[1]\nfor alpha in [0.05, 0.1, 0.2]:\n v_fd = (df_stats_fdr<alpha).sum(axis=0)\n v_d = v_fd.clip(lower=1)\n v_fdp = v_fd / v_d\n print('# alpha=%0.2f, FDP=%0.3f (SE=%0.3f)'\n %(alpha, v_fdp.mean(), 1.96*v_fdp.std()/np.sqrt(df_stats_fdr.shape[1])))", "# n_celltype=118, n_rep=100\n# alpha=0.05, FDP=0.000 (SE=0.000)\n# alpha=0.10, FDP=0.020 (SE=0.028)\n# alpha=0.20, FDP=0.180 (SE=0.076)\n" ] ] ]
[ "code", "markdown", "code" ]
[ [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ] ]
cbfde690c7c41a9618dceb46ad86adbe5c31da78
305,755
ipynb
Jupyter Notebook
TEMA-2/Clase19_ComprobacionDeDistribuciones.ipynb
Csainz17/SPF-2020-I-G1
2ad11bd21fce4ddd366d66a1ea9b6cb0a9431b60
[ "MIT" ]
null
null
null
TEMA-2/Clase19_ComprobacionDeDistribuciones.ipynb
Csainz17/SPF-2020-I-G1
2ad11bd21fce4ddd366d66a1ea9b6cb0a9431b60
[ "MIT" ]
null
null
null
TEMA-2/Clase19_ComprobacionDeDistribuciones.ipynb
Csainz17/SPF-2020-I-G1
2ad11bd21fce4ddd366d66a1ea9b6cb0a9431b60
[ "MIT" ]
null
null
null
334.890471
37,796
0.927341
[ [ [ "# Probando el ajuste de distribuciones hipotéticas\n", "_____no_output_____" ], [ "A veces, el conocimiento específico sugiere fuertes razones que justifiquen alguna suposición; de lo contrario, esto debería probarse de alguna manera. Cuando comprobamos si los datos experimentales se ajustan a una distribución de probabilidad dada, no estamos realmente probando una hipótesis sobre un parámetro o dos; de hecho, estamos ejecutando una prueba no paramétrica.\nEn esta sección ilustramos tres tipos de enfoque:\n- La prueba de chi-cuadrado, que es de propósito general y, en términos generales, verifica el ajuste en términos de histogramas y densidades.", "_____no_output_____" ] ], [ [ "import numpy as np\nimport scipy.stats as st # Librería estadística\nimport statsmodels.api as sm \nimport matplotlib.pyplot as plt", "_____no_output_____" ] ], [ [ "Una trama Q-Q es generalmente un enfoque más poderoso, en lugar de simplemente usar la técnica común de comparación de histogramas de las dos muestras, pero requiere más habilidad para interpretar. Los diagramas Q-Q se usan comúnmente para comparar un conjunto de datos con un modelo teórico\n![imagen.png](attachment:imagen.png)\n\n## Interpretación \n\n- Si las dos distribuciones que se comparan son idénticas, la gráfica Q-Q sigue la línea de 45° $y = x$\n- Si las dos distribuciones concuerdan después de transformar linealmente los valores en una de las distribuciones, entonces la gráfica Q – Q sigue alguna línea, pero no necesariamente la línea y = x\n- Si la tendencia general de la gráfica Q-Q es más plana que la línea y = x, la distribución representada en el eje horizontal está más dispersa que la distribución representada en el eje vertical. \n- A la inversa, si la tendencia general de la gráfica Q-Q es más pronunciada que la línea y = x, la distribución representada en el eje vertical está más dispersa que la distribución representada en el eje horizontal.\n\n> Referencia: https://en.wikipedia.org/wiki/Q%E2%80%93Q_plot", "_____no_output_____" ] ], [ [ "# Creo variables aleatorias normales\nmu = 30; sigma = 10\nmeasurements = np.random.normal(loc = mu, scale = sigma, size=100) \n\n# Histograma de las variables creadas\ndivisiones = 10 # Cantidad de barras en el histograma\nplt.hist(measurements,divisiones,density=True)\nx = np.arange(0,60,.1)\ny = st.norm.pdf(x,loc = mu, scale = sigma)\nplt.plot(x,y,'r--')\nplt.ylabel('Probability')\nplt.grid()\nplt.show()\n\n# gráfica de Q-Q entre las muestras creadas y una curva normal\n# grap2 = st.probplot(measurements, dist=\"norm\", plot=plt)\ngrap1 = sm.qqplot(measurements, dist='norm',line='s')\nplt.grid()\n# plt.xlabel('Normal theorical quantiles')\n# plt.ylabel('Data theorical quantiles')\nplt.show()", "_____no_output_____" ], [ "######## t-student\nnsample = 100\n#A t distribution with small degrees of freedom:\nx = st.t.rvs(3, size=nsample)\n# res = st.probplot(x, plot=plt)\nsm.qqplot(x,dist='norm',line='45')\nplt.title('t with small df')\nplt.grid()\nplt.show()\n\n#A t distribution with larger degrees of freedom:\nx = st.t.rvs(505, size=nsample)\n# res = st.probplot(x, plot=plt)\nsm.qqplot(x,dist='norm',line='45')\nplt.title('t with larger df')\nplt.grid()\nplt.show()", "_____no_output_____" ], [ "y_norm = st.norm(*st.norm.fit(x)).pdf(x1)\nQ_norm = np.percentile(y_norm,[25,50,75])\nQ_norm", "_____no_output_____" ], [ "########### Ajustando una exponencial\nnsample = 500\n# Distribución exponencial con parámetro lambda = 0.7\nlamb = 1\nx = st.expon.rvs(loc=0,scale=1/lamb, size=nsample)\n\nf,ax 
= plt.subplots(1,2,figsize=(12,4))\n\nsm.qqplot(x,dist='norm',line='s',ax = ax[0])\nplt.title('Comparación de cuantiles exponenciales con normales')\nplt.ylim([0,3])\nplt.grid()\n# plt.show()\n\n# Gráfica box-plot\nplt.subplot(122)\nB = plt.boxplot(x)\nplt.grid()\nplt.show()\n\n# Obtener los valores de Q1 y Q3 de la gráfica\n[item.get_ydata()[0] for item in B['whiskers']]\n# Otra forma de obtener los quantiles \nQ_exp = np.percentile(x,[25,50,75])\n\nprint('Quantiles random exponential de la muestra ',Q_exp)\n", "_____no_output_____" ], [ "10//2", "_____no_output_____" ], [ "np.random.seed(5555)\n# Comparación de dos conjuntos de datos\nx = np.random.normal(loc=8.5, scale=2.5, size=100)\ny = np.random.normal(loc=8.0, scale=3.0, size=100)\n\n# Gráfica de las dos normales\nx1 = np.arange(-1,20,.1)\ny1 = st.norm.pdf(x1,loc=8.5, scale=2.5)\ny2 = st.norm.pdf(x1,loc=8.0, scale=3.0)\nplt.plot(x1,y1,x1,y2)\n\n# Comparación de quantiles de dos conjuntos de datos\nsm.qqplot_2samples(x, y,line='45')\n\nplt.show()\n", "_____no_output_____" ], [ "Q2 = np.arange(2,14,2)\nprint('Quantiles x=',np.percentile(x,Q2))\nprint('Quantiles y=',np.percentile(y,Q2))\nnp.percentile(x,Q2)-np.percentile(y,Q2)", "Quantiles x= [3.05240157 3.36923609 4.24031955 4.51690146 4.79619594 5.35938916]\nQuantiles y= [2.88591235 3.48215162 3.74052592 4.22447996 4.45836696 4.85921428]\n" ] ], [ [ "# La prueba chi cuadrado - Usando el histograma\n\n$$\\chi^2 = \\sum_{j=1}^J{(O_j-E_j)^2\\over E_j}$$\nEl estadístico anterior, tiene (aproximadamente) una distribución de chi-cuadrado. Deberíamos rechazar la hipótesis si $\\chi^2$ es demasiado grande, es decir, si $\\chi^2>\\chi^2_{1-\\alpha,m}$ donde:\n\n- $\\chi^2_{1-\\alpha,m}$ es un cuantil de la distribución de chi-cuadrado.\n- $\\alpha$ es el nivel de significancia de la prueba.\n- m es el número de grados de libertad.\n\nLo que nos falta aquí es m, que depende del número de parámetros de la distribución que hemos estimado utilizando los datos. 
Si no se ha estimado ningún parámetro, es decir, si hemos asumido una distribución parametrizada específica antes de observar datos, los grados de libertad son $J - 1$; si tenemos parámetros p estimados, deberíamos usar $J - p - 1$, con $J$ la cantidad de particiones del histograma.", "_____no_output_____" ] ], [ [ "np.random.seed(555)\nmu_real = 10; sigma_real = 20\nN = 100 # Cantidad de muestras\nn1 = np.random.normal(mu_real,sigma_real,N)\nJ = 90 # Cantidad de particiones del histograma\n[freq,x,p]=plt.hist(n1,J,density=True)\nplt.show()\n# Se obvia el último valor de x para obtener exactamente J muestras de x\nx = x[:-1] \n\n# Media y desviación estándar muestral\nmu = np.mean(x)\nsigma = np.std(x)\nprint('media mu =%f, desviación estándar muestral =%f'%(mu,sigma))", "_____no_output_____" ] ], [ [ "### Se desea resolver la siguiente prueba de hipótesis\n> $H_0$: la distribución es normal con ´media= $\\mu$´ y ´desviación estándar = $\\sigma$´\n\n> $H_a$: los datos no se distribuyen normales", "_____no_output_____" ] ], [ [ "pi = st.norm.pdf(x,loc=mu,scale=sigma)\n# Cálculo de la esperanza usando la expresión teórica\nEi = x*pi\n# Cálculo teórico de la chi cuadrada\nx2 = np.sum(list(map(lambda Ei,obs_i:(obs_i-Ei)**2/Ei,Ei,freq)))\nprint('Valor de chi cuadrado teorico = ',x2)\n\n# Cálculo usando la librería estadística de la chi cuadrada\nX2 = st.chisquare(freq,Ei)\nprint('Valor de chi cuadrado librería = ',X2)\n\n# Cálculo de Grados de libertad del estadístico\np = 2 # Parámetros estimados con los datos\nm = J-p-1 # grados de libertad\n\nChi_est = st.chi2.ppf(q = 0.95,df=m)\nprint('Estadístico de chi_cuadrado = ',Chi_est)\nprint('Media muestral = ',mu,'\\nDesviación estándar muestral = ',sigma)", "Valor de chi cuadrado teorico = 7.7732802991551075\nValor de chi cuadrado librería = Power_divergenceResult(statistic=7.7732802991551075, pvalue=1.0)\nEstadístico de chi_cuadrado = 109.77330935028795\nMedia muestral = 10.682741365706983 \nDesviación estándar muestral = 26.18193710642433\n" ] ], [ [ "> **Conclusión**: No podemos rechazar la $H_0$ por lo tanto los datos distribuyen normales.", "_____no_output_____" ], [ "# Pruebas de correlación de Pearson\n\nEl análisis de correlación juega un papel importante en los modelos de Monte Carlo:\n- En el análisis de entrada, debemos verificar si algunas variables están correlacionadas para modelarlas correctamente.\n- Al aplicar la reducción de varianza por el método de números complementarios, puede ser importante verificar la fuerza de la correlación entre el estimador de Monte Carlo crudo y la variable de control que consideramos.\n\nEstá claro que la magnitud de la correlación se debe comparar con el tamaño de la muestra, y una estrategia simple es probar la hipótesis nula\n$$ H_0: \\rho_{XY}=0$$\ncontra la hipótesis alternativa\n$$ H_a: \\rho_{XY}\\neq0$$\n\nSin embargo, necesitamos una estadística cuya distribución bajo la hipótesis nula sea bastante manejable. Un resultado útil es que, si la muestra es normal, la estadística\n\n$$T=R_{XY}\\sqrt{{n-2 \\over 1-R_{XY}^2}}$$\n\nse distribuye aproximadamente como una variable t con n - 2 grados de libertad, para una muestra adecuadamente grande. 
Esto puede ser explotado para llegar a las pruebas de correlación.\n\nEntonces, si tenemos un conjunto de datos ${x_1, ..., x_n}$ que contiene n valores y otro conjunto de datos ${y_1, ..., y_n}$ que contiene n valores, entonces la fórmula para la correlación $R_{XY}$ es:\n\n$$R_{XY}={\\frac {\\sum _{i=1}^{n}(x_{i}-{\\bar {x}})(y_{i}-{\\bar {y}})}{{\\sqrt {\\sum _{i=1}^{n}(x_{i}-{\\bar {x}})^{2}}}{\\sqrt {\\sum _{i=1}^{n}(y_{i}-{\\bar {y}})^{2}}}}}$$ \nDonde:\n- n es el tamaño de la muestra\n- $x_{i},y_{i}$ son las muestras individuales indexadas con i.\n- ${\\bar {x}}={\\frac {1}{n}}\\sum _{i=1}^{n}x_{i}$ la media muestral; y análogamente para $\\bar {y}$.\n- Puede ser probado que $-1\\leq R_{XY} \\leq 1$, justo como su contraparte probabilistica $\\rho_{XY}$\n\n\n", "_____no_output_____" ] ], [ [ "np.random.seed(5555)\nN = 100\nZ = np.random.normal(size=N)\nx1 = np.random.normal(10,5,N)\nx2 = np.random.normal(30,8,N)\n\n# Comando que estima el valor del coeficiente de correlación de pearson \ncorr = st.pearsonr(x1+50*Z,x2+50*Z)\ncorr2 = st.pearsonr(x1,x2)\n\nRxy = corr[0]\nprint('Rxy = ',corr[0],', p-value = ',corr[1])\nprint('Rxy2 = ',corr2[0],',p-value2 = ',corr2[1])\n\n# Cálculo del p-value \nT = Rxy*np.sqrt((N-2)/(1-Rxy**2))\np_val = st.t.pdf(T,df=N-2)\nprint(p_val)", "Rxy = 0.9855483869036423 , p-value = 2.205241931694522e-77\nRxy2 = -0.09903662944768503 ,p-value2 = 0.3269244409953945\n1.822820243909071e-77\n" ] ], [ [ "El ` valor-p` indica aproximadamente la **probabilidad de que un sistema no correlacionado produzca conjuntos de datos que tengan una correlación de Pearson** al menos tan extrema como la calculada a partir de estos conjuntos de datos. Los valores p no son completamente confiables, pero probablemente sean razonables para conjuntos de datos mayores de 500 aproximadamente.", "_____no_output_____" ] ], [ [ "x = np.arange(-10,60,.1)\ny1 = st.norm.pdf(x,10,5)\ny2 = st.norm.pdf(x,30,8)\nplt.plot(x,y1,x,y2);", "_____no_output_____" ] ], [ [ "# Estimación de parámetros\n\nIlustremos a tráves de un ejemplo como usando el método montecarlos podemos estimar parámetros de interés de alguna distribución en particular.\n\n### Ejemplo\nConsidere una variable aleatoria $X\\sim U[a,b]$. Recordemos que\n$$E(X)={a+b\\over 2},\\quad Var(X)={(b-a)^2\\over 12}$$\n\nClaramente, la media de la muestra $\\bar X$ y la varianza muestral $S^2$ no nos proporcionan estimaciones directas de los parámetros a y b. Sin embargo, podríamos considerar la siguiente forma de transformar las estadísticas de muestra en estimaciones de parámetros. Si sustituimos $\\mu$ y $\\sigma^2$ con sus estimaciones, encontramos\n$$\\begin{split}a+b&=2\\bar X \\\\ -a+b&=2\\sqrt 3 S\\end{split}$$\n\nResolviendo este sistemas de ecuaciones obtenemos los siguientes estimados\n\n$$\\hat a = \\bar X -\\sqrt 3 S,\\quad \\hat b = \\bar X+ \\sqrt 3 S$$", "_____no_output_____" ] ], [ [ "# Solución ejemplo \na= 5; b=10 # Parámetros reales\nN = 10; # Cantidad de términos\nX = np.random.uniform(a,b,N)\nmedia = np.mean(X)\nstd = np.std(X)\n\n# estimaciones\na_hat = media-np.sqrt(3)*std\nb_hat = media+np.sqrt(3)*std\nprint('Estimación de a = ',a_hat)\nprint('Estimación de b = ',b_hat)", "Estimación de a = 4.66591853352\nEstimación de b = 9.56006069181\n" ] ], [ [ "# Método de máxima verosimilitud\n\nEl método de máxima verosimilitud es un enfoque alternativo para encontrar estimadores de forma sistemática. Imagine que una variable aleatoria X tiene un PDF caracterizado por un único parámetro $\\theta$, denotado por $f_x(x;\\theta)$. 
Si extraemos una muestra de n i.i.d. variables de esta distribución, la densidad conjunta es solo el producto de PDF's individuales:\n\n$$f_{X_1,\\cdots,X_n}(x_1,\\cdots,x_n;\\theta)=f_X(x_1;\\theta)\\cdot f_X(x_2;\\theta)\\cdots f_X(x_n;\\theta)=\\prod_{i=1}^{n}f_X(x_i;\\theta)$$\n\nSi nosotros estamos interesados en estimar $\\theta$ dada una muestra $X_i=x_i,i=1,\\cdots,n$, podemos construir la función verosimilitud\n\n$$L(\\theta)=L(\\theta;x_1,\\cdots,x_n)=f_{X_1,\\cdots,X_n}(x_1,\\cdots,x_n;\\theta)$$\n\nEsta notación es usada para enfatizar que la función depende del parámetros desconocido $\\theta$, para una muestra de observaciones dada. El acrónimo para referirse a este método es MLE 'maximum-likelihood estimator'\n\nLa intuición sugiere que deberíamos seleccionar el parámetro $\\theta$ que produzca el mayor valor de la función de probabilidad. Por lo tanto lo que debemos de hacer es encontrar el $\\theta$ que maximice la expresión anterior y para ellos podemos hacer uso de la derivada para encontrar los puntos críticos de la función $L(\\theta)$. Ilustremos el método con el siguiente ejemplo.", "_____no_output_____" ], [ "## Ejemplo bernoulli\n\nSupongamos $X_1,...X_n\\sim Bernoulli(p)$. La función de densidad correspondiente es $p(x;p)=p x(1−p)^{1−x}$, por lo que:\n$$\n\\mathcal{L}(p)=\\prod_{i=1}^n p(x_i;p)=\\prod_{i=1}^n p^{x_i}(1-p)^{1-x_i}=p^{\\sum x_i}(1-p)^{n-\\sum x_i}\n$$\ndenotemos $S=\\sum x_i$, entonces \n$$\n\\mathcal{l}(p)=S \\log p + (n-S) \\log (1-p)\n$$\n\nEncontremos su máxima verosimilitud", "_____no_output_____" ], [ " La derivada con respecto a $ p$ es: \n![image.png](attachment:image.png)\nla cuál se anula en:\n$$\n\\hat p = \\frac{\\sum x_i}{n}\n$$\n> Referencias: \n> - https://ljk.imag.fr/membres/Bernard.Ycart/emel/cours/ep/node12.html\n> - https://tereom.github.io/est-computacional-2018/maxima-verosimilitud.html", "_____no_output_____" ] ], [ [ "from scipy import optimize\n\nn = 20\nS = 12\nl = lambda theta: S*np.log(theta) + (n - S) * np.log(1-theta)\nL = lambda theta: theta**S * (1-theta)**(n-S)\ntheta = np.arange(0.001,1,0.001)\n\nplt.plot(theta,L(theta),label= r'L($\\theta$)')\nplt.legend()\nplt.show()\n\nplt.plot(theta,l(theta),label= r'l($\\theta$)')\nplt.legend()\nplt.show()\n\nmax_L = optimize.fmin(lambda x:-L(x),0,disp=False)\nprint(r'máximo $\\theta$ de L($\\theta$)=', max_L)\nmax_teorico = S/n\nprint('máximo teorico =',max_teorico)", "_____no_output_____" ] ], [ [ "### Ejemplo: MLE para la función de distribución exponencial\n\nLa PDF de una V.A exponencial esta dada por:\n$$f_X(x;\\lambda)=\\lambda e^{-\\lambda x}$$\n\nEncuentre la función de máxima verosimilitud para demostrar que la condición de optimalidad de primer orden conduce a \n\n$$\\hat \\lambda = {1\\over {1\\over n}\\sum_{i=1}^nX_i}={1 \\over \\bar X}$$", "_____no_output_____" ], [ "### Forma de utilizar este método en python", "_____no_output_____" ] ], [ [ "################ Adjusting a exponential distribution \nnp.random.seed(5555)\n# picking 150 of from a exponential distrubution\n# with lambda = 5\nsamp = st.expon.rvs(loc=0,scale=1/5,size=150) \n\nparam = st.expon.fit(samp) # distribution fitting\n# now, param[0] and param[1]=1/lambda are the location and scale\n# of the fitted distribution\nx = np.linspace(0,2,100)\n# fitted distribution\npdf_fitted = st.expon.pdf(x,loc=param[0],scale=param[1])\n# original distribution\npdf = st.expon.pdf(x,loc=0,scale=1/5)\n\nplt.title('Expoential 
distribution')\nplt.plot(x,pdf_fitted,'r-',label='Fitted')\nplt.plot(x,pdf,'b-',label='Original')\nplt.legend()\nplt.hist(samp,20,normed=1,alpha=.8,)\nplt.show()\nprint('Lambda fitted = ',1/param[1])", "_____no_output_____" ] ], [ [ "# <font color = red> Tarea\n\nDemostrar **Teoricamente** usando el MLE, que los estimadores de máxima verosimilitud para los parámetros $\\mu$ y $\\sigma$ de una distribución normal, estan dados por:\n\n$$\\hat \\mu = {1\\over n}\\sum_{i=1}^n x_i,\\quad \\hat \\sigma^2={1\\over n}\\sum_{i=1}^n (x_i-\\hat \\mu)^2$$\n\n**Recuerde que:** La distribución normal es\n$$f(x\\mid \\mu ,\\sigma ^{2})={\\frac {1}{\\sqrt {2\\pi \\sigma ^{2}}}}e^{-{\\frac {(x-\\mu )^{2}}{2\\sigma ^{2}}}}$$", "_____no_output_____" ], [ "### Forma de utilizar este método en python", "_____no_output_____" ] ], [ [ "################ Adjusting a normal distribution \nnp.random.seed(5555)\n# picking 150 of from a normal distrubution\n# with mean 0 and standard deviation 1\nsamp = st.norm.rvs(loc=0,scale=1,size=100) \n\nparam = st.norm.fit(samp) # distribution fitting\n\n# now, param[0] and param[1] are the mean and \n# the standard deviation of the fitted distribution\nx = np.linspace(-5,5,100)\n# fitted distribution\npdf_fitted = st.norm.pdf(x,loc=param[0],scale=param[1])\n# original distribution\npdf = st.norm.pdf(x)\n\nplt.title('Normal distribution')\nplt.plot(x,pdf_fitted,'r-',label='Fitted')\nplt.plot(x,pdf,'b-',label='Original')\nplt.legend()\nplt.hist(samp,30,density=1,alpha=.8,)\nplt.show()\n\nprint('Media encontrada usando MLE=',param[0],', \\nStd encontrada usando MLE=',param[1])\n\n# Parameters obtained theoricaly\nsigma_hat = np.std(samp)\nmu_hat = np.mean(samp)\nprint('Media obtenida usando la media muestral=',mu_hat,', \\nStd obtenida usando la std muestral=',sigma_hat)\n", "_____no_output_____" ] ], [ [ "<script>\n $(document).ready(function(){\n $('div.prompt').hide();\n $('div.back-to-top').hide();\n $('nav#menubar').hide();\n $('.breadcrumb').hide();\n $('.hidden-print').hide();\n });\n</script>\n\n<footer id=\"attribution\" style=\"float:right; color:#808080; background:#fff;\">\nCreated with Jupyter by Oscar David Jaramillo Zuluaga.\n</footer>", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ] ]
cbfdefd1ca92b4561ac700a87a4cdba226201c18
43,006
ipynb
Jupyter Notebook
code/3_Drug_Properties.ipynb
menchelab/Perturbome
c93aeb2d42a1900f5060322732dd97f8eb8db7bd
[ "MIT" ]
5
2019-11-15T19:58:31.000Z
2021-12-08T19:30:10.000Z
code/3_Drug_Properties.ipynb
mcaldera/Perturbome
82c752f90f7100865c09cfea0f1fe96deffe2ed9
[ "MIT" ]
1
2020-01-06T21:23:57.000Z
2020-01-07T14:06:21.000Z
code/3_Drug_Properties.ipynb
mcaldera/Perturbome
82c752f90f7100865c09cfea0f1fe96deffe2ed9
[ "MIT" ]
4
2019-11-26T07:34:49.000Z
2022-02-22T06:41:43.000Z
39.527574
216
0.572781
[ [ [ "# Properties of drugs\nFind various properties of the individual drugs \n \n1.) ATC \n2.) GO Annotations \n3.) Disease \n4.) KeGG Pathways \n5.) SIDER (known effects) \n6.) Offside (known off sides) \n7.) TwoSides \n8.) Drug Properties (physico-chemical properties) \n9.) Enzymes, Transporters and Carriers \n10.) Chemical_Gentic Perturbations (MsigDB)", "_____no_output_____" ], [ "## 1. ATC \nExtract information about the anatomical as well as therapeutic group a drug is associated to using DrugBank as main source", "_____no_output_____" ] ], [ [ "import networkx as nx\n\n#The the ATC classification from drugbank (see python file: 2a_Create_DrugBank_Network.ipynb)\nDrugBankInfo = nx.read_gml('../data/Drug_Properties/Drugbank_2018-07-03_CLOUD_Only.gml')\nprint 'DrugBank Network loaded'\n\n#Create output file\nfp_out = open('../results/Drug_Properties/CLOUD_to_ATC.csv','w')\nfp_out.write('CLOUD,DrugBankID,First_Level_ATCs,Second_Level_ATCs\\n')\n\n#Dictionary containing DrugBank to CLOUD identifier\nDrugBank_to_CLOUD = {}\n#parse through all CLOUD drugs and check for ATC code annotation in drugbank (Use first and second level; third level and below too specific)\nfp = open('../data/Drug_Properties/CLOUD_DrugBank_PubChem_Chembl.csv','r')\nfp.next()\nfor line in fp:\n tmp = line.strip().split(',')\n DrugBank_to_CLOUD[tmp[1]] = tmp[0]\n first_level = set()\n fist_second_level = set()\n if DrugBankInfo.has_node(tmp[1]):\n if DrugBankInfo.node[tmp[1]].has_key('ATCcode'):\n atc_codes = DrugBankInfo.node[tmp[1]]['ATCcode'].split(',')\n if '' in atc_codes:\n atc_codes.remove('')\n\n for atc in atc_codes:\n atc = atc.strip()\n first_level.add(atc[0])\n fist_second_level.add(atc[0:3])\n\n fp_out.write(tmp[0]+','+tmp[1]+','+';'.join(first_level)+','+';'.join(fist_second_level)+'\\n')\n\nfp.close()\nfp_out.close()\n\nprint 'Finished ATC annotations'", "_____no_output_____" ] ], [ [ "## 2. GO Annotations\nExtract GO annotations from GeneOntology for the targets of the individual drugs. 
Not only leaf but also upstream term information is collected for the three branches (i) Function, (ii) Component, (iii) Process", "_____no_output_____" ] ], [ [ "#use our inhouse database and the corresponding python file to create the upward ontology for every leaf GO term (all get included)\n#Download (http://www.geneontology.org/page/downloads)\nimport gene2terms_addupstream as GO\n\n#Include all threee GO branches\ngo_branches = ['Function','Process','Component']\n\n#Find all the targets for the individual cloud drugs\ncloud_targets = {}\nfp = open('../data/Drug_Properties/CLOUD_All_Targets.csv', 'r')\nfp.next()\nfor line in fp:\n tmp = line.strip().split(',')\n cloud_targets[tmp[0]] = tmp[2].split(';')\nfp.close()\n\n#contain all CLOUD identifier\nall_clouds = cloud_targets.keys()\nall_clouds.sort()\n\n#Go throug the GO branches and find GO terms for a specific drug via: Drug --> Targets --> Associated GO-Terms\ndrug_to_GO = {}\nfor go_branch in go_branches:\n print go_branch\n drug_to_GO[go_branch] = {}\n GO_Association_UP, GO_genes_annotation = GO.getAllGene_Annotation(go_branch)\n \n for drug in all_clouds:\n drug_to_GO[go_branch][drug] = []\n for target in cloud_targets[drug]:\n drug_to_GO[go_branch][drug].extend(GO_Association_UP[target])\n drug_to_GO[go_branch][drug] = list(set(drug_to_GO[go_branch][drug]))\n \n#Save CLOUD drug to GO term annotations\nfp_out = open('../results/Drug_Properties/CLOUD_to_GOterms.csv','w')\nfp_out.write('CLOUD,GO_Function,GO_Process,GO_Component\\n')\nfor cloud in all_clouds:\n fp_out.write(cloud+','+';'.join(drug_to_GO['Function'][cloud])+','+';'.join(drug_to_GO['Process'][cloud])+','+';'.join(drug_to_GO['Component'][cloud])+'\\n')\nfp_out.close()\n\nprint 'Finished GO'", "_____no_output_____" ] ], [ [ "## 3. Diseases\nExtract Disesase annotations from DiseaseOntology for the targets of the individual drugs. 
Not only leaf but also upstream term information is collected.", "_____no_output_____" ] ], [ [ "# Download from http://www.disgenet.org/web/DisGeNET/menu/downloads and http://disease-ontology.org/downloads/\n# Again use inhouse database (manually curated), and corresponding scripts \n\n# Get all cloud drug targets\nfp = open('../data/Drug_Properties/CLOUD_All_Targets.csv', 'r')\nfp.next()\nfor line in fp:\n tmp = line.strip().split(',')\n cloud_targets[tmp[0]] = tmp[2].split(';')\nfp.close()\n\nall_clouds = cloud_targets.keys()\nall_clouds.sort()\n\n#Extrate the upward disease ontology (find all disease associated leaf plus upwards ontology terms for a specific gene)\nDisease_Association_UP,d_diseases_annotation = GO.getAllGene_Disease_Annotation()\n\n\nall_proteins = Disease_Association_UP.keys()\nall_proteins = [int(x) for x in all_proteins]\nall_proteins.sort()\n\nfp_out = open('../results/Drug_Properties/Gene_to_Disease.csv','w')\nfp_out.write('Gene,Disease_ID\\n')\nfor protein in all_proteins:\n fp_out.write(str(protein)+','+';'.join(Disease_Association_UP[str(protein)])+'\\n')\nfp_out.close()\n\n\n\nbreak\n\n\n#associated drug with diseaes\ndrug_to_Diseases = {}\nfor drug in all_clouds:\n drug_to_Diseases[drug] = []\n for target in cloud_targets[drug]:\n drug_to_Diseases[drug].extend(Disease_Association_UP[target])\n drug_to_Diseases[drug] = list(set(drug_to_Diseases[drug]))\n \n\n\nfp_out = open('../results/Drug_Properties/CLOUD_to_Disease.csv','w')\nfp_out.write('CLOUD,Disease_ID\\n')\nfor cloud in all_clouds:\n fp_out.write(cloud+','+';'.join(drug_to_Diseases[cloud])+'\\n')\nfp_out.close()\n\nprint 'Finished Diseases'", "_____no_output_____" ] ], [ [ "## 4. KeGG Pathways\nExtract information about pathways being annotated to (i) the drug itself, as well as (ii) pathways associated to the target of drugs", "_____no_output_____" ] ], [ [ "'''\nExtract direct drug <--> pathway annotations\n'''\n\n#Get KeGG pathways via the biopython.KEGG REST \nfrom Bio.KEGG import REST\n\n#Find the KeGG identifiers via the drugbank annotations\nDrugBankInfo = nx.read_gml('../data/Drug_Properties/Drugbank_2018-07-03_CLOUD_Only.gml')\nprint 'DrugBank Network loaded'\n\n#parse through all CLOUD targets\nfp = open('../data/Drug_Properties/CLOUD_DrugBank_PubChem_Chembl.csv','r')\nfp.next()\ndrug_to_pathways = {}\nall_targeted_Pathways = set()\nall_clouds = []\nkegg_IDs = {}\n\n#find the KeGG Drug page and find PATHWAY informations (direct drug to pathway)\nfor line in fp:\n tmp = line.strip().split(',')\n \n drug_to_pathways[tmp[0]] = []\n \n all_clouds.append(tmp[0])\n \n if DrugBankInfo.has_node(tmp[1]):\n if DrugBankInfo.node[tmp[1]].has_key('KEGGDrug'):\n kegg_ID = DrugBankInfo.node[tmp[1]]['KEGGDrug']\n kegg_IDs[tmp[0]] = kegg_ID\n drug_file = REST.kegg_get(kegg_ID).read()\n\n for line in drug_file.rstrip().split(\"\\n\"):\n section = line[:12].strip() # section names are within 12 columns\n if not section == \"\":\n current_section = section\n if current_section == \"PATHWAY\":\n tmp2 = line[12:].split(' ')\n pathwayID = tmp2[0].split('(')[0]\n drug_to_pathways[tmp[0]].append(pathwayID)\n all_targeted_Pathways.add(pathwayID)\n \nprint 'Number of pathways directed targeted: %d' %len(all_targeted_Pathways)\n\nall_clouds.sort()\n", "_____no_output_____" ], [ "'''\nAdditonally to finding the direct annotations, also find drug <--> targets <--> pathways associated to those target annotations\n'''\n\n#Get all targets\ncloud_targets = {}\nfp = open('../data/Drug_Properties/CLOUD_All_Targets.csv', 
'r')\nfp.next()\nfor line in fp:\n tmp = line.strip().split(',')\n cloud_targets[tmp[0]] = tmp[2].split(';')\nfp.close()\n\n# find human pahtways\nhuman_pathways = REST.kegg_list(\"pathway\", \"hsa\").read()\n\n# get all human pathways, and add the dictionary\npathways = {}\nfor line in human_pathways.rstrip().split(\"\\n\"):\n entry, description = line.split(\"\\t\")\n pathways[entry] = {'Description' :description, 'IDs':None,'Symbols':None}\n\n\nprint len(pathways)\n# Get the genes for pathways and add them to a list\n\nfor pathway in pathways.keys():\n pathway_file = REST.kegg_get(pathway).read() # query and read each pathway\n\n # iterate through each KEGG pathway file, keeping track of which section\n # of the file we're in, only read the gene in each pathway\n current_section = None\n\n genesSymbols = []\n genesIDs = []\n for line in pathway_file.rstrip().split(\"\\n\"):\n section = line[:12].strip() # section names are within 12 columns\n if not section == \"\":\n current_section = section\n\n if current_section == \"GENE\":\n if ';' in line:\n gene_identifiers, gene_description = line[12:].split(\"; \")\n gene_id, gene_symbol = gene_identifiers.split()\n\n if not gene_id in genesIDs:\n genesIDs.append(gene_id)\n genesSymbols.append(gene_symbol)\n\n pathways[pathway] = genesIDs\n\nvia_target_assigned_Pathways = {}\nsecond_assigned_pathways = set()\nfor cloud in all_clouds:\n via_target_assigned_Pathways[cloud] = [] \n targets = cloud_targets[cloud]\n for p in pathways:\n if len(set(targets).intersection(set(pathways[p]))) > 0:\n via_target_assigned_Pathways[cloud].append(p)\n second_assigned_pathways.add(p)\n \nprint 'Number of pathways indirected targeted: %d' %len(second_assigned_pathways)\n\nfp_out = open('../results/Drug_Properties/CLOUD_to_KeGG_Pathways.csv','w')\nfp_out.write('CLOUD,KeGG_DrugID,KeGG_Assigned_Pathways,Via_Target_Assigned\\n')\nfor cloud in all_clouds:\n if kegg_IDs.has_key(cloud):\n fp_out.write(cloud+','+kegg_IDs[cloud]+','+';'.join(drug_to_pathways[cloud])+','+';'.join(via_target_assigned_Pathways[cloud])+'\\n')\n else:\n fp_out.write(cloud+',,'+';'.join(drug_to_pathways[cloud])+','+';'.join(via_target_assigned_Pathways[cloud])+'\\n')\n \nfp_out.close()\n\nprint 'Finished Pathways' ", "_____no_output_____" ] ], [ [ "## 5. SIDER\nExtract information about known adverse reaction of drugs using the Sider database", "_____no_output_____" ] ], [ [ "def ATC_To_PubChem(isOffsides = 'None'):\n '''\n Sider offerst a direct conversion from ATC code to the internally used PubChem ID.\n Offers a better coverage. \n \n Download: http://sideeffects.embl.de/download/ [Nov. 
2018] drug_atc.tsv file\n (here named: Pubchem_To_ATC)\n '''\n\n dic_ATc_To_Pubchem = {}\n fp = open('../data/Drug_Properties/Pubchem_To_ATC.tsv')\n for line in fp:\n tmp = line.strip().split('\\t')\n dic_ATc_To_Pubchem[tmp[1]] = tmp[0]\n\n cloud_drugs = nx.read_gml('../data/Drug_Properties/Drugbank_2018-07-03_CLOUD_Only.gml')\n \n \n #find pubchem identifiers via ATC identifiers (as pubchem identifiers sometimes not unique neithers SID nor CID)\n cloud_to_Pubchem = {}\n PubChem_to_cloud = {}\n found_PubChems = []\n for drugBankID in cloud_drugs.nodes():\n if cloud_drugs.node[drugBankID].has_key('ATCcode'):\n all_codes = [x.strip() for x in cloud_drugs.node[drugBankID]['ATCcode'].split(',') if x != '']\n for code in all_codes:\n if dic_ATc_To_Pubchem.has_key(code):\n pubChemID = dic_ATc_To_Pubchem[code][3:]\n if isOffsides == 'offsides':\n tmp = list(pubChemID)\n tmp[0] = '0'\n pubChemID = ''.join(tmp)\n\n cloud_to_Pubchem[drugBankID] = pubChemID\n PubChem_to_cloud[pubChemID] = drugBankID\n found_PubChems.append(pubChemID)\n\n\n return cloud_to_Pubchem, PubChem_to_cloud,found_PubChems\n", "_____no_output_____" ], [ "'''\nDownload SIDER.tsv from http://sideeffects.embl.de/download/ [Nov. 2018] \n'''\n\n#get the different identifiers of a drug\nDrugBank_To_CLOUD = {}\nCLOUD_To_DrugBank = {}\nfp = open('../data/Drug_Properties/CLOUD_DrugBank_PubChem_Chembl.csv')\nfp.next()\nall_clouds = []\nfor line in fp:\n tmp = line.strip().split(',')\n all_clouds.append(tmp[0])\n DrugBank_To_CLOUD[tmp[1]] = tmp[0]\n CLOUD_To_DrugBank[tmp[0]] = tmp[1]\nfp.close()\n\nall_clouds.sort()\n\n#extract pubchem identifier via ATC codes\nDrugBank_to_Pubchem_viaATC, PubChem_to_cloud_viaATC,found_PubChems_viaATC = ATC_To_PubChem()\n\n#further use drugbank to find additional pubchem identifiers for the cloud drugs\ncloud_drugs = nx.read_gml('../data/Drug_Properties/Drugbank_2018-07-03_CLOUD_Only.gml')\n\n#associate cloud with the different pubchem identifiers\npubchemCompound_To_DrugBank = {}\nDrugBank_to_PubChem = {}\npubchemCompound = []\npubchemSubstance = []\nfor node in cloud_drugs.nodes():\n if cloud_drugs.node[node].has_key('PubChemCompound'):\n pubchemCompound.append(cloud_drugs.node[node]['PubChemCompound'])\n pubchemCompound_To_DrugBank[cloud_drugs.node[node]['PubChemCompound']] = node\n DrugBank_to_PubChem[node] = cloud_drugs.node[node]['PubChemCompound']\n\n#Combine both dictionaries together\nfor key in DrugBank_to_Pubchem_viaATC:\n DrugBank_to_PubChem[key] = DrugBank_to_Pubchem_viaATC[key]\n\n\n\n#check the SIDER database for given sideeffect of a given drug (once via the ATC to pubchem identfiers; once via drugbank to pubchem)\ncompund_sideEffect = {}\nfp = open('../data/Drug_Properties/SIDER.tsv','r')\nfor line in fp:\n tmp = line.strip().split('\\t')\n id1 = tmp[1][3:]\n id2 = tmp[2][3:]\n\n if id1 in found_PubChems_viaATC:\n if compund_sideEffect.has_key(PubChem_to_cloud_viaATC[id1]):\n compund_sideEffect[PubChem_to_cloud_viaATC[id1]].append(tmp[3])\n else:\n compund_sideEffect[PubChem_to_cloud_viaATC[id1]] = [tmp[3]]\n\n if id1 in pubchemCompound:\n if compund_sideEffect.has_key(pubchemCompound_To_DrugBank[id1]):\n compund_sideEffect[pubchemCompound_To_DrugBank[id1]].append(tmp[3])\n else:\n compund_sideEffect[pubchemCompound_To_DrugBank[id1]] = [tmp[3]]\n\n\n\n if id2 in found_PubChems_viaATC:\n if compund_sideEffect.has_key(PubChem_to_cloud_viaATC[id2]):\n compund_sideEffect[PubChem_to_cloud_viaATC[id2]].append(tmp[3])\n else:\n compund_sideEffect[PubChem_to_cloud_viaATC[id2]] = [tmp[3]]\n\n if 
id2 in pubchemCompound:\n if compund_sideEffect.has_key(pubchemCompound_To_DrugBank[id2]):\n compund_sideEffect[pubchemCompound_To_DrugBank[id2]].append(tmp[3])\n else:\n compund_sideEffect[pubchemCompound_To_DrugBank[id2]] = [tmp[3]]\n\n##\n# Save results\n##\n\n\nfp = open('../results/Drug_Properties/CLOUD_to_SIDER.csv','w')\nfp.write('CLOUD,PubChem,SIDER_Ids\\n')\nfor key in all_clouds:\n if compund_sideEffect.has_key(CLOUD_To_DrugBank[key]):\n fp.write(key +','+DrugBank_to_PubChem[CLOUD_To_DrugBank[key]]+','+';'.join(list(set(compund_sideEffect[CLOUD_To_DrugBank[key]])))+'\\n')\n elif DrugBank_to_PubChem.has_key(CLOUD_To_DrugBank[key]):\n fp.write(key +','+DrugBank_to_PubChem[CLOUD_To_DrugBank[key]]+',' + '\\n')\n else:\n fp.write(key + ',,\\n')\nfp.close()\n\nprint 'Finish with SIDER'", "_____no_output_____" ] ], [ [ "## 6. Offsides\nExtract information about known adverse reaction of drugs using the Offside database (Tantonetti)", "_____no_output_____" ] ], [ [ "'''\nDownload Offsides.tsv from http://tatonettilab.org/resources/tatonetti-stm.html [Nov. 2018] \n'''\n#get the different identifiers of a drug\nDrugBank_To_CLOUD = {}\nCLOUD_To_DrugBank = {}\nfp = open('../data/Drug_Properties/CLOUD_DrugBank_PubChem_Chembl.csv')\nfp.next()\nfor line in fp:\n tmp = line.strip().split(',')\n DrugBank_To_CLOUD[tmp[1]] = tmp[0]\n CLOUD_To_DrugBank[tmp[0]] = tmp[1]\nfp.close()\n\n#extract pubchem identifier via ATC codes\nDrugBank_to_Pubchem_viaATC, PubChem_to_cloud_viaATC, found_PubChems_viaATC = ATC_To_PubChem('offsides')\n\n#further use drugbank to find additional pubchem identifiers for the cloud drugs\ncloud_drugs = nx.read_gml('../data/Drug_Properties/Drugbank_2018-07-03_CLOUD_Only.gml')\n\n#associate cloud with the different pubchem identifiers\npubchemCompound_To_DrugBank = {}\nDrugBank_to_PubChem = {}\npubchemCompound = []\npubchemSubstance = []\nfor node in cloud_drugs.nodes():\n if cloud_drugs.node[node].has_key('PubChemCompound'):\n pubchemCompound.append(cloud_drugs.node[node]['PubChemCompound'].zfill(9))\n pubchemCompound_To_DrugBank[cloud_drugs.node[node]['PubChemCompound'].zfill(9)] = node\n DrugBank_to_PubChem[node] = cloud_drugs.node[node]['PubChemCompound'].zfill(9)\n\n# Combine both dictionaries together\nfor key in DrugBank_to_Pubchem_viaATC:\n DrugBank_to_PubChem[key] = DrugBank_to_Pubchem_viaATC[key]\n\n\n#check the OFFSIDES database for given sideeffect of a given drug (once via the ATC to pubchem identfiers; once via drugbank to pubchem)\ncompund_sideEffect = {}\nfp = open('../data/Drug_Properties/Offsides.tsv', 'r')\nfp.next()\nfor line in fp:\n tmp = line.strip().split('\\t')\n\n\n id1 = tmp[0].replace('\"','')[3:]\n sideEffect = tmp[2].replace('\"','')\n\n #print id1\n\n if id1 in found_PubChems_viaATC:\n if compund_sideEffect.has_key(PubChem_to_cloud_viaATC[id1]):\n compund_sideEffect[PubChem_to_cloud_viaATC[id1]].append(sideEffect)\n else:\n compund_sideEffect[PubChem_to_cloud_viaATC[id1]] = [sideEffect]\n print len(compund_sideEffect.keys())\n # print compund_sideEffect.keys()\n\n if id1 in pubchemCompound:\n if compund_sideEffect.has_key(pubchemCompound_To_DrugBank[id1]):\n compund_sideEffect[pubchemCompound_To_DrugBank[id1]].append(sideEffect)\n else:\n compund_sideEffect[pubchemCompound_To_DrugBank[id1]] = [sideEffect]\n print len(compund_sideEffect.keys())\n # print compund_sideEffect.keys()\n\nfp = open('../results/Drug_Properties/CLOUD_to_Offsides.csv', 'w')\nfp.write('CLOUD,PubChem,OFFSIDE_Ids\\n')\nfor key in all_clouds:\n if 
compund_sideEffect.has_key(CLOUD_To_DrugBank[key]):\n fp.write(key +','+DrugBank_to_PubChem[CLOUD_To_DrugBank[key]]+','+';'.join(list(set(compund_sideEffect[CLOUD_To_DrugBank[key]])))+'\\n')\n elif DrugBank_to_PubChem.has_key(CLOUD_To_DrugBank[key]):\n fp.write(key + ',' +DrugBank_to_PubChem[CLOUD_To_DrugBank[key]]+',' + '\\n')\n else:\n fp.write(key + ',,\\n')\nfp.close()\n\nprint 'Finish with OFFSIDES'", "_____no_output_____" ] ], [ [ "## 7. TwoSides\nExtract information about side effects for drug combinations using TwoSide (Tantonetti))", "_____no_output_____" ] ], [ [ "'''\nDownload Offsides.tsv from http://tatonettilab.org/resources/tatonetti-stm.html [Nov. 2018] \n'''\n#get the different identifiers of a drug\nDrugBank_To_CLOUD = {}\nCLOUD_To_DrugBank = {}\nfp = open('../data/Drug_Properties/CLOUD_DrugBank_PubChem_Chembl.csv')\nfp.next()\nfor line in fp:\n tmp = line.strip().split(',')\n DrugBank_To_CLOUD[tmp[1]] = tmp[0]\n CLOUD_To_DrugBank[tmp[0]] = tmp[1]\nfp.close()\n\n#extract pubchem identifier via ATC codes\nDrugBank_to_Pubchem_viaATC, PubChem_to_cloud_viaATC, found_PubChems_viaATC = ATC_To_PubChem('offsides')\n\n#further use drugbank to find additional pubchem identifiers for the cloud drugs\ncloud_drugs = nx.read_gml('../data/Drug_Properties/Drugbank_2018-07-03_CLOUD_Only.gml')\n\npubchemCompound_To_DrugBank = {}\nDrugBank_to_PubChem = {}\npubchemCompound = []\npubchemSubstance = []\nfor node in cloud_drugs.nodes():\n if cloud_drugs.node[node].has_key('PubChemCompound'):\n pubchemCompound.append(cloud_drugs.node[node]['PubChemCompound'].zfill(9))\n pubchemCompound_To_DrugBank[cloud_drugs.node[node]['PubChemCompound'].zfill(9)] = node\n DrugBank_to_PubChem[node] = cloud_drugs.node[node]['PubChemCompound'].zfill(9)\n\n# Combine both dictionaries together\nfor key in DrugBank_to_Pubchem_viaATC:\n DrugBank_to_PubChem[key] = DrugBank_to_Pubchem_viaATC[key]\n \n \n#check the SIDER database for given sideeffect of a given drug (once via the ATC to pubchem identfiers; once via drugbank to pubchem)\nTwoSide_Network = nx.Graph()\nfp = open('../data/Drug_Properties/TwoSides.tsv', 'r')\nfp.next()\nfor line in fp:\n tmp = line.strip().split('\\t')\n\n\n id1 = tmp[0][3:]\n id2 = tmp[1][3:]\n sideEffect = tmp[4]\n\n #print id1\n found_id1 = None\n found_id2 = None\n \n if id1 in found_PubChems_viaATC: \n found_id1 = PubChem_to_cloud_viaATC[id1]\n elif id1 in pubchemCompound: \n found_id1 = pubchemCompound_To_DrugBank[id1]\n \n if found_id1 != None:\n if id2 in found_PubChems_viaATC: \n found_id2 = PubChem_to_cloud_viaATC[id2]\n elif id2 in pubchemCompound: \n found_id2 = pubchemCompound_To_DrugBank[id2]\n \n \n if found_id2 != None:\n if TwoSide_Network.has_edge(found_id1,found_id2) == False:\n TwoSide_Network.add_edge(found_id1,found_id2)\n TwoSide_Network[found_id1][found_id2]['SideEffect'] = sideEffect\n else:\n TwoSide_Network[found_id1][found_id2]['SideEffect'] = TwoSide_Network[found_id1][found_id2]['SideEffect'] +',' + sideEffect\n \n \nnx.write_gml(TwoSide_Network,'../results/Drug_Properties/TwoSide_CLOUDs.gml')\n\nprint 'Finish with TwoSides'\n", "_____no_output_____" ] ], [ [ "## 8. Drug Properties\nExtract Physicochemical properties of the drugs e.g. Lipinski Rule of 5, LogS, LogP etc. 
Use DrugBank as main source of information", "_____no_output_____" ] ], [ [ "'''\nPhysicochemical properties (calculated) offered by DrugBank\n'''\n\n#List of interesting physicochemical properties (continues)\nContinuesfeatures = ['Polarizability','logS','logP','NumberofRings','PhysiologicalCharge',\n 'PolarSurfaceAreaPSA','pKastrongestbasic','pKastrongestacidic',\n 'Refractivity','MonoisotopicWeight','HBondDonorCount',\n 'RotatableBondCount','WaterSolubility']\n\n##List of interesting physicochemical properties (discrete)\ndiscreteFeatures = ['DrugSubClass','DrugClass','Family']\n\n#Drugbank file\nDrugBankInfo = nx.read_gml('../data/Drug_Properties/Drugbank_2018-07-03_CLOUD_Only.gml')\nprint 'DrugBank Network loaded'\n\n#output file\nfp = open('../data/Drug_Properties/CLOUD_DrugBank_PubChem_Chembl.csv','r')\nfp.next()\n\n\n#parse through all cloud drugs and find physicochemical propterties\nCLOUD_Chemical_properties = {}\nall_clouds = []\nkegg_IDs = {}\nfor line in fp:\n tmp = line.strip().split(',')\n \n all_clouds.append(tmp[0])\n CLOUD_Chemical_properties[tmp[0]] = {}\n \n \n if DrugBankInfo.has_node(tmp[1]):\n CLOUD_Chemical_properties[tmp[0]]['DrugBankID'] = tmp[1]\n \n for c in Continuesfeatures:\n if DrugBankInfo.node[tmp[1]].has_key(c):\n CLOUD_Chemical_properties[tmp[0]][c] = str(DrugBankInfo.node[tmp[1]][c])\n else:\n CLOUD_Chemical_properties[tmp[0]][c] = 'None'\n \n \n for d in discreteFeatures:\n if DrugBankInfo.node[tmp[1]].has_key(d):\n CLOUD_Chemical_properties[tmp[0]][d] = str(DrugBankInfo.node[tmp[1]][d])\n else:\n CLOUD_Chemical_properties[tmp[0]][d] = 'None'\n \n else:\n CLOUD_Chemical_properties[tmp[0]]['DrugBankID'] = 'None'\n \n for c in Continuesfeatures:\n CLOUD_Chemical_properties[tmp[0]][c] = 'None'\n \n for d in discreteFeatures:\n CLOUD_Chemical_properties[tmp[0]][d] = 'None'\n \n##\n# Save results\n##\n \nfp = open('../results/Drug_Properties/CLOUD_to_ChemicalProperties.tsv', 'w')\nfp.write('CLOUD\\tDrugBankID\\t')\nfp.write('\\t'.join(Continuesfeatures)+'\\t'+'\\t'.join(discreteFeatures)+'\\n') \n \nfor cloud in all_clouds:\n fp.write(cloud+'\\t'+CLOUD_Chemical_properties[cloud]['DrugBankID'])\n for c in Continuesfeatures:\n fp.write('\\t'+CLOUD_Chemical_properties[cloud][c])\n for d in discreteFeatures:\n fp.write('\\t'+CLOUD_Chemical_properties[cloud][d])\n fp.write('\\n')\nfp.close()\n\nprint 'Finish with Chemical Properties'\n", "_____no_output_____" ] ], [ [ "## 9. Targets, Enzymes, Transporters and Carriers\nSplit the full lust of targets into targets, enzymes, transporters and carriers\nTherefore use the DrugBank annotations of what a target, transporter, carrier and enzyme is. Go trough all drugbank targets and take the corresponding annotations.\nThen go trough the CLOUD targets and assign the targets accordingly. If drugbank does not show any annotation the gene is assumed to be a target.\n\nEnzymes: e.g. CYP3A1 \nTransporter: e.g. MDR5 \nCarriers: e.g. 
ALB", "_____no_output_____" ] ], [ [ "DrugBankInfo = nx.read_gml('../data/Drug_Properties/Drugbank_2018-07-03.gml')\nprint 'Full DrugBank Network loaded'", "_____no_output_____" ], [ "annotated_enzyme_symbols = set()\nannotated_transporters_symbols = set()\nannotated_carriers_symbols = set()\n\n#Go through all drugs in drugbank and extract target information; bin it correctly into one of the three classes\nfor drug in list(DrugBankInfo.nodes()):\n \n if DrugBankInfo.node[drug].has_key('Enzymes'):\n enzymes = [x for x in DrugBankInfo.node[drug]['Enzymes'].strip().split(',') if x != '']\n for e in enzymes:\n annotated_enzyme_symbols.add(e.split('_')[0])\n if DrugBankInfo.node[drug].has_key('Transporters'):\n transporters = [x for x in DrugBankInfo.node[drug]['Transporters'].strip().split(',') if x != '']\n for t in transporters:\n annotated_transporters_symbols.add(t.split('_')[0])\n \n if DrugBankInfo.node[drug].has_key('Carriers'):\n carriers = [x for x in DrugBankInfo.node[drug]['Carriers'].strip().split(',') if x != '']\n for c in carriers:\n annotated_carriers_symbols.add(c.split('_')[0])\n\n#Plot the number of found Enzymes, Transporters, Carriers\nprint len(annotated_enzyme_symbols)\nprint len(annotated_transporters_symbols)\nprint len(annotated_carriers_symbols)", "_____no_output_____" ], [ "'''\nParse the enzyme, carriers and transporter SYMBOLS to EntrezIDs using mygeneinfo\n'''\n\nimport mygene\nmg = mygene.MyGeneInfo()\n\n#Enzymes\nquery = mg.querymany(annotated_enzyme_symbols, scope='symbol', species='human',verbose=False)\nfinal_annotated_enzyme_symbols = []\nfinal_annotated_enzyme_IDs = []\nfor result in query:\n if result.has_key('entrezgene'):\n final_annotated_enzyme_symbols.append(result['symbol'])\n final_annotated_enzyme_IDs.append(str(result['_id']))\n\n#Transporters\nquery = mg.querymany(annotated_transporters_symbols, scope='symbol', species='human',verbose=False)\nfinal_annotated_transporters_symbols = []\nfinal_annotated_transporters_IDs = []\nfor result in query:\n if result.has_key('entrezgene'):\n final_annotated_transporters_symbols.append(result['symbol'])\n final_annotated_transporters_IDs.append(str(result['_id']))\n\n#Carriers\nquery = mg.querymany(annotated_carriers_symbols, scope='symbol', species='human',verbose=False)\nfinal_annotated_carriers_symbols = []\nfinal_annotated_carriers_IDs = []\nfor result in query:\n if result.has_key('entrezgene'):\n final_annotated_carriers_symbols.append(result['symbol'])\n final_annotated_carriers_IDs.append(str(result['_id']))\n \n\nprint len(final_annotated_enzyme_IDs)\nprint len(final_annotated_transporters_IDs)\nprint len(final_annotated_carriers_IDs)\n \n ", "_____no_output_____" ], [ "'''\nCreate an output file with the various transporters/enzymes/targets etc. 
being split.\n'''\n\n#Get the DrugBank targets\ncloud_DrugBanktargets = {}\nfp = open('../data/Drug_Properties/CLOUD_DrugBank_Targets_ONLY.csv', 'r')\nfp.next()\nfor line in fp:\n tmp = line.strip().split(',')\n cloud_DrugBanktargets[tmp[0]] = tmp[2].split(';')\nfp.close()\n\n#Get all targets accociated to the individual CLOUDS (including CYP etc.)\ncloud_targets = {}\nfp = open('../data/Drug_Properties/CLOUD_All_Targets.csv', 'r')\nfp.next()\nfor line in fp:\n tmp = line.strip().split(',')\n cloud_targets[tmp[0]] = tmp[2].split(';')\nfp.close()\n\n#List containing all CLOUD identifiers\nall_clouds = cloud_targets.keys()\nall_clouds.sort()\n\n#Create output file\nfp_out = open('../results/Drug_Properties/CLOUD_to_TargetsSplit.csv', 'w')\nfp_out.write('CLOUD,Targets,Transporters,Enzymes,Carriers\\n')\n\n#save the per drug annotations of CLOUD drugs\ntargets_number = []\nenzymes_number = []\ntransporters_number = []\ncarriers_number = []\n\n#save total amount of distinct targets, enzymes etc. targeted by CLOUD\ndifferent_targets = set()\ndifferent_enzymes = set()\ndifferent_transporters = set()\ndifferent_carriers = set()\n\n#save total amount of targets found\nall_targets = 0\n\n#Go through all CLOUDS\nfor cloud in all_clouds:\n \n targets = []\n enzymes = []\n carriers = []\n transporters = []\n \n for target in cloud_targets[cloud]:\n \n \n #First check if the target is annoated in DrugBank to be a target of this drug! (sometimes CYP or other can be main targets)\n if target in cloud_DrugBanktargets[cloud]:\n targets.append(target)\n else:\n #If it is not the main target of this drug bin it correctly according to drugbank standards\n not_associated = False\n if target in final_annotated_enzyme_IDs:\n enzymes.append(target)\n not_associated = True\n if target in final_annotated_transporters_IDs:\n transporters.append(target)\n not_associated = True\n if target in final_annotated_carriers_IDs:\n carriers.append(target)\n not_associated = True\n\n if not_associated == False:\n targets.append(target)\n fp_out.write(cloud+','+';'.join(targets)+','+';'.join(transporters)+','+';'.join(enzymes)+','+';'.join(carriers)+'\\n')\n \n #Save the results\n all_targets += len(targets)\n targets_number.append(len(targets))\n enzymes_number.append(len(enzymes))\n transporters_number.append(len(transporters))\n carriers_number.append(len(carriers))\n \n different_targets = different_targets.union(set(targets))\n different_enzymes = different_enzymes.union(set(enzymes))\n different_transporters = different_transporters.union(set(transporters))\n different_carriers = different_carriers.union(set(carriers))\n \n \n \nfp_out.close()\n\n", "_____no_output_____" ], [ "'''\nCREATE OUTPUT OVERVIEW OVER DRUG TARGETS/ANNOTATIONS\n'''\n\n\nimport numpy as np\nfrom matplotlib import pylab as plt\n\n\nprint'Mean number of targets: %.2f' %np.mean(targets_number)\nprint'Median number of targets: %.2f' %np.median(targets_number)\nprint'Mean number of enzymes: %.2f' %np.mean(enzymes_number)\nprint'Mean number of carriers: %.2f' %np.mean(carriers_number)\nprint'Mean number of transporters: %.2f' %np.mean(transporters_number)\n\nprint 'Total number of targets: %d' %all_targets\nprint 'Number of distinct targets: %d' %len(different_targets)\nprint'Number of distinct enzymes: %d' %len(different_enzymes)\nprint'Number of distinct carriers: %d' %len(different_carriers)\nprint'Number of distinct transporters: %d' %len(different_transporters)\n\n\nplt.hist(targets_number,bins=22, 
color='#40B9D4')\nplt.axvline(np.mean(targets_number),ls='--', color='grey')\nplt.savefig('../results/Drug_Properties/CLOUD_TargetsFiltered.pdf')\nplt.close()", "_____no_output_____" ] ], [ [ "## 10. Chemical Genetic perturbations\nUse the msigDB Chemical_Genetic_Perturbations set to annotate the CLOUD target respetively", "_____no_output_____" ] ], [ [ "'''\nDownload from http://software.broadinstitute.org/gsea/msigdb/collections.jsp#C5 [December 17. 2018]\n'''\n\n#Get all CLOUD targets\ncloud_targets = {}\nfp = open('../data/Drug_Properties/CLOUD_All_Targets.csv', 'r')\nfp.next()\nfor line in fp:\n tmp = line.strip().split(',')\n cloud_targets[tmp[0]] = tmp[2].split(';')\nfp.close()\n\n#Find the gene to perturbation associated (one gene can have various associated perturbations)\nfp = open('../data/Drug_Properties/Msig_ChemGen_Perturbation.gmt','r')\ngene_to_perturbation = {}\nfor line in fp:\n tmp = line.strip().split('\\t')\n for gene in tmp[2:]:\n if gene_to_perturbation.has_key(gene):\n gene_to_perturbation[gene].append(tmp[0])\n else:\n gene_to_perturbation[gene] = [tmp[0]]\nfp.close()\n\n\n#find cloud associations via CLOUD --> Targets ===> Perturbations associated with certain targets\nfp_out = open('../results/Drug_Properties/CLOUD_to_Perturbations.csv', 'w')\nfp_out.write('CLOUD,Perturbations\\n')\nfor cloud in all_clouds:\n\n perturbations = []\n for gene in cloud_targets[cloud]:\n if gene_to_perturbation.has_key(gene):\n perturbations.extend(gene_to_perturbation[gene])\n fp_out.write(cloud+','+';'.join(perturbations)+'\\n')\nfp_out.close()", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ] ]
cbfdf21885d4a2299af25573343ede4ebfd2802e
22,317
ipynb
Jupyter Notebook
coursera_ai/AssignmentAnomalyDetection.ipynb
mbalgabayev/claimed
b595c25a752874602054443fb8785611c5e863e4
[ "Apache-2.0" ]
308
2021-08-09T20:08:59.000Z
2022-03-31T15:24:02.000Z
coursera_ai/AssignmentAnomalyDetection.ipynb
mbalgabayev/claimed
b595c25a752874602054443fb8785611c5e863e4
[ "Apache-2.0" ]
10
2021-09-30T16:47:17.000Z
2022-03-25T14:25:16.000Z
coursera_ai/AssignmentAnomalyDetection.ipynb
mbalgabayev/claimed
b595c25a752874602054443fb8785611c5e863e4
[ "Apache-2.0" ]
615
2021-08-11T12:41:21.000Z
2022-03-31T18:08:12.000Z
31.972779
503
0.609849
[ [ [ "# Graded Programming Assignment\n\nIn this assignment, you will implement re-use the unsupervised anomaly detection algorithm but turn it into a simpler feed forward neural network for supervised classification.\n\nYou are training the neural network from healthy and broken samples and at later stage hook it up to a message queue for real-time anomaly detection.\n\nWe've provided a skeleton for you containing all the necessary code but left out some important parts indicated with ### your code here ###\n\nAfter you’ve completed the implementation please submit it to the autograder\n", "_____no_output_____" ] ], [ [ "!pip install tensorflow==2.2.0rc0", "_____no_output_____" ], [ "import tensorflow as tf\nif not tf.__version__ == '2.2.0-rc0':\n print(tf.__version__)\n raise ValueError('please upgrade to TensorFlow 2.2.0-rc0, or restart your Kernel (Kernel->Restart & Clear Output)')", "_____no_output_____" ] ], [ [ "Now we import all the dependencies ", "_____no_output_____" ] ], [ [ "import numpy as np\nfrom numpy import concatenate\nfrom matplotlib import pyplot\nfrom pandas import read_csv\nfrom pandas import DataFrame\nfrom pandas import concat\nimport sklearn\nfrom sklearn.preprocessing import MinMaxScaler\nfrom sklearn.metrics import mean_squared_error\nfrom tensorflow.keras.models import Sequential\nfrom tensorflow.keras.layers import Dense, Dropout\nfrom tensorflow.keras.layers import LSTM\nfrom tensorflow.keras.callbacks import Callback\nfrom tensorflow.keras.models import Sequential\nfrom tensorflow.keras.layers import LSTM, Dense, Activation\nimport pickle\nimport matplotlib.pyplot as plt\nfrom mpl_toolkits.mplot3d import Axes3D\nimport sys\nfrom queue import Queue\nimport pandas as pd\nimport json\n%matplotlib inline", "_____no_output_____" ] ], [ [ "We grab the files necessary for taining. Those are sampled from the lorenz attractor model implemented in NodeRED. Those are two serialized pickle numpy arrays. In case you are interested in how these data has been generated please have a look at the following tutorial. https://developer.ibm.com/tutorials/iot-deep-learning-anomaly-detection-2/", "_____no_output_____" ] ], [ [ "!rm watsoniotp.*\n!wget https://raw.githubusercontent.com/romeokienzler/developerWorks/master/lorenzattractor/watsoniotp.healthy.phase_aligned.pickle\n!wget https://raw.githubusercontent.com/romeokienzler/developerWorks/master/lorenzattractor/watsoniotp.broken.phase_aligned.pickle\n!mv watsoniotp.healthy.phase_aligned.pickle watsoniotp.healthy.pickle\n!mv watsoniotp.broken.phase_aligned.pickle watsoniotp.broken.pickle", "_____no_output_____" ] ], [ [ "De-serialize the numpy array containing the training data", "_____no_output_____" ] ], [ [ "data_healthy = pickle.load(open('watsoniotp.healthy.pickle', 'rb'), encoding='latin1')\ndata_broken = pickle.load(open('watsoniotp.broken.pickle', 'rb'), encoding='latin1')", "_____no_output_____" ] ], [ [ "Reshape to three columns and 3000 rows. In other words three vibration sensor axes and 3000 samples", "_____no_output_____" ], [ "Since this data is sampled from the Lorenz Attractor Model, let's plot it with a phase lot to get the typical 2-eyed plot. 
First for the healthy data", "_____no_output_____" ] ], [ [ "fig = plt.figure()\nax = fig.gca(projection='3d')\n\nax.plot(data_healthy[:,0], data_healthy[:,1], data_healthy[:,2],lw=0.5)\nax.set_xlabel(\"X Axis\")\nax.set_ylabel(\"Y Axis\")\nax.set_zlabel(\"Z Axis\")\nax.set_title(\"Lorenz Attractor\")", "_____no_output_____" ] ], [ [ "Then for the broken one", "_____no_output_____" ] ], [ [ "fig = plt.figure()\nax = fig.gca(projection='3d')\n\nax.plot(data_broken[:,0], data_broken[:,1], data_broken[:,2],lw=0.5)\nax.set_xlabel(\"X Axis\")\nax.set_ylabel(\"Y Axis\")\nax.set_zlabel(\"Z Axis\")\nax.set_title(\"Lorenz Attractor\")", "_____no_output_____" ] ], [ [ "In the previous examples, we fed the raw data into an LSTM. Now we want to use an ordinary feed-forward network. So we need to do some pre-processing of this time series data\n\nA widely-used method in traditional data science and signal processing is called Discrete Fourier Transformation. This algorithm transforms from the time to the frequency domain, or in other words, it returns the frequency spectrum of the signals.\n\nThe most widely used implementation of the transformation is called FFT, which stands for Fast Fourier Transformation, let’s run it and see what it returns\n", "_____no_output_____" ] ], [ [ "data_healthy_fft = np.fft.fft(data_healthy).real\ndata_broken_fft = np.fft.fft(data_broken).real", "_____no_output_____" ] ], [ [ "Let’s first have a look at the shape and contents of the arrays.", "_____no_output_____" ] ], [ [ "print (data_healthy_fft.shape)\nprint (data_healthy_fft)", "_____no_output_____" ] ], [ [ "First, we notice that the shape is the same as the input data. So if we have 3000 samples, we get back 3000 spectrum values, or in other words 3000 frequency bands with the intensities.\n\nThe second thing we notice is that the data type of the array entries is not float anymore, it is complex. So those are not complex numbers, it is just a means for the algorithm the return two different frequency compositions in one go. The real part returns a sine decomposition and the imaginary part a cosine. We will ignore the cosine part in this example since it turns out that the sine part already gives us enough information to implement a good classifier.\n\nBut first let’s plot the two arrays to get an idea how a healthy and broken frequency spectrum differ\n", "_____no_output_____" ] ], [ [ "fig, ax = plt.subplots(num=None, figsize=(14, 6), dpi=80, facecolor='w', edgecolor='k')\nsize = len(data_healthy_fft)\nax.plot(range(0,size), data_healthy_fft[:,0].real, '-', color='blue', animated = True, linewidth=1)\nax.plot(range(0,size), data_healthy_fft[:,1].real, '-', color='red', animated = True, linewidth=1)\nax.plot(range(0,size), data_healthy_fft[:,2].real, '-', color='green', animated = True, linewidth=1)", "_____no_output_____" ], [ "fig, ax = plt.subplots(num=None, figsize=(14, 6), dpi=80, facecolor='w', edgecolor='k')\nsize = len(data_healthy_fft)\nax.plot(range(0,size), data_broken_fft[:,0].real, '-', color='blue', animated = True, linewidth=1)\nax.plot(range(0,size), data_broken_fft[:,1].real, '-', color='red', animated = True, linewidth=1)\nax.plot(range(0,size), data_broken_fft[:,2].real, '-', color='green', animated = True, linewidth=1)", "_____no_output_____" ] ], [ [ "So, what we've been doing is so called feature transformation step. We’ve transformed the data set in a way that our machine learning algorithm – a deep feed forward neural network implemented as binary classifier – works better. 
So now let's scale the data to a 0..1", "_____no_output_____" ] ], [ [ "def scaleData(data):\n # normalize features\n scaler = MinMaxScaler(feature_range=(0, 1))\n return scaler.fit_transform(data)", "_____no_output_____" ] ], [ [ "And please don’t worry about the warnings. As explained before we don’t need the imaginary part of the FFT", "_____no_output_____" ] ], [ [ "data_healthy_scaled = scaleData(data_healthy_fft)\ndata_broken_scaled = scaleData(data_broken_fft)", "_____no_output_____" ], [ "data_healthy_scaled = data_healthy_scaled.T\ndata_broken_scaled = data_broken_scaled.T", "_____no_output_____" ] ], [ [ "Now we reshape again to have three examples (rows) and 3000 features (columns). It's important that you understand this. We have turned our initial data set which containd 3 columns (dimensions) of 3000 samples. Since we applied FFT on each column we've obtained 3000 spectrum values for each of the 3 three columns. We are now using each column with the 3000 spectrum values as one row (training example) and each of the 3000 spectrum values becomes a column (or feature) in the training data set", "_____no_output_____" ] ], [ [ "data_healthy_scaled.reshape(3, 3000)\ndata_broken_scaled.reshape(3, 3000)", "_____no_output_____" ] ], [ [ "# Start of Assignment\n\nThe first thing we need to do is to install a little helper library for submitting the solutions to the coursera grader:", "_____no_output_____" ] ], [ [ "!rm -f rklib.py\n!wget https://raw.githubusercontent.com/IBM/coursera/master/rklib.py", "_____no_output_____" ] ], [ [ "Please specify you email address you are using with cousera here:", "_____no_output_____" ] ], [ [ "from rklib import submit, submitAll\nkey = \"4vkB9vnrEee8zg4u9l99rA\"\nall_parts = [\"O5cR9\",\"0dXlH\",\"ZzEP8\"]\n\nemail = #### your code here ###", "_____no_output_____" ] ], [ [ "\n## Task\n\nGiven, the explanation above, please fill in the following two constants in order to make the neural network work properly", "_____no_output_____" ] ], [ [ "#### your code here ###\ndim = #### your code here ###\nsamples = #### your code here ###", "_____no_output_____" ] ], [ [ "### Submission\n\nNow it’s time to submit your first solution. Please make sure that the secret variable contains a valid submission token. You can obtain it from the courser web page of the course using the grader section of this assignment.\n", "_____no_output_____" ] ], [ [ "part = \"O5cR9\"\ntoken = #### your code here ### (have a look here if you need more information on how to obtain the token https://youtu.be/GcDo0Rwe06U?t=276)\n\nparts_data = {}\nparts_data[\"0dXlH\"] = json.dumps({\"number_of_neurons_layer1\": 0, \"number_of_neurons_layer2\": 0, \"number_of_neurons_layer3\": 0, \"number_of_epochs\": 0})\nparts_data[\"O5cR9\"] = json.dumps({\"dim\": dim, \"samples\": samples})\nparts_data[\"ZzEP8\"] = None \n\n\nsubmitAll(email, token, key, parts_data)", "_____no_output_____" ] ], [ [ "To observe how training works we just print the loss during training", "_____no_output_____" ] ], [ [ "class LossHistory(Callback):\n def on_train_begin(self, logs={}):\n self.losses = []\n\n def on_batch_end(self, batch, logs={}):\n sys.stdout.write(str(logs.get('loss'))+str(', '))\n sys.stdout.flush()\n self.losses.append(logs.get('loss'))\n \nlr = LossHistory()", "_____no_output_____" ] ], [ [ "## Task\n\nPlease fill in the following constants to properly configure the neural network. 
For some of them you have to find out the precise value, for others you can try and see how the neural network is performing at a later stage. The grader only looks at the values which need to be precise\n", "_____no_output_____" ] ], [ [ "number_of_neurons_layer1 = #### your code here ###\nnumber_of_neurons_layer2 = #### your code here ###\nnumber_of_neurons_layer3 = #### your code here ###\nnumber_of_epochs = #### your code here ###", "_____no_output_____" ] ], [ [ "### Submission\n\nPlease submit your constants to the grader", "_____no_output_____" ] ], [ [ "parts_data = {}\nparts_data[\"0dXlH\"] = json.dumps({\"number_of_neurons_layer1\": number_of_neurons_layer1, \"number_of_neurons_layer2\": number_of_neurons_layer2, \"number_of_neurons_layer3\": number_of_neurons_layer3, \"number_of_epochs\": number_of_epochs})\nparts_data[\"O5cR9\"] = json.dumps({\"dim\": dim, \"samples\": samples})\nparts_data[\"ZzEP8\"] = None \n \n \ntoken = #### your code here ###\n\n\nsubmitAll(email, token, key, parts_data)", "_____no_output_____" ] ], [ [ "## Task\n\nNow it’s time to create the model. Please fill in the placeholders. Please note since this is only a toy example, we don't use a separate corpus for training and testing. Just use the same data for fitting and scoring\n", "_____no_output_____" ] ], [ [ "# design network\nfrom tensorflow.keras import optimizers\nsgd = optimizers.SGD(lr=0.01, clipnorm=1.)\n\nmodel = Sequential()\nmodel.add(Dense(number_of_neurons_layer1,input_shape=(dim, ), activation='relu'))\nmodel.add(Dense(number_of_neurons_layer2, activation='relu'))\nmodel.add(Dense(number_of_neurons_layer3, activation='sigmoid'))\nmodel.compile(loss='binary_crossentropy', optimizer=sgd)\n\ndef train(data,label):\n model.fit(#### your code here ###, #### your code here ###, epochs=number_of_epochs, batch_size=72, validation_data=(data, label), verbose=0, shuffle=True,callbacks=[lr])\n\ndef score(data):\n return model.predict(data)", "_____no_output_____" ] ], [ [ "We prepare the training data by concatenating a label “0” for the broken and a label “1” for the healthy data. Finally we union the two data sets together", "_____no_output_____" ] ], [ [ "label_healthy = np.repeat(1,3)\nlabel_healthy.shape = (3,1)\nlabel_broken = np.repeat(0,3)\nlabel_broken.shape = (3,1)\n\ntrain_healthy = np.hstack((data_healthy_scaled,label_healthy))\ntrain_broken = np.hstack((data_broken_scaled,label_broken))\ntrain_both = np.vstack((train_healthy,train_broken))", "_____no_output_____" ] ], [ [ "Let’s have a look at the two training sets for broken and healthy and at the union of them. Note that the last column is the label", "_____no_output_____" ] ], [ [ "pd.DataFrame(train_healthy)", "_____no_output_____" ], [ "pd.DataFrame(train_broken)", "_____no_output_____" ], [ "pd.DataFrame(train_both)", "_____no_output_____" ] ], [ [ "So those are frequency bands. Notice that although many frequency bands are having nearly the same energy, the neural network algorithm still can work those out which are significantly different. \n\n## Task\n\nNow it’s time to do the training. Please provide the first 3000 columns of the array as the 1st parameter and column number 3000 containing the label as 2nd parameter. Please use the python array slicing syntax to obtain those. 
\n\nThe following link tells you more about the numpy array slicing syntax\nhttps://docs.scipy.org/doc/numpy-1.13.0/reference/arrays.indexing.html\n", "_____no_output_____" ] ], [ [ "features = train_both[:,#### your code here ###]\nlabels = train_both[:,#### your code here ###]", "_____no_output_____" ] ], [ [ "Now it’s time to do the training. You should see the loss trajectory go down, we will also plot it later. Note: We also could use TensorBoard for this but for this simple scenario we skip it. In some rare cases training doesn’t converge simply because random initialization of the weights caused gradient descent to start at a sub-optimal spot on the cost hyperplane. Just recreate the model (the cell which contains *model = Sequential()*) and re-run all subsequent steps and train again\n\n", "_____no_output_____" ] ], [ [ "train(features,labels)", "_____no_output_____" ] ], [ [ "Let's plot the losses", "_____no_output_____" ] ], [ [ "fig, ax = plt.subplots(num=None, figsize=(14, 6), dpi=80, facecolor='w', edgecolor='k')\nsize = len(lr.losses)\nax.plot(range(0,size), lr.losses, '-', color='blue', animated = True, linewidth=1)", "_____no_output_____" ] ], [ [ "Now let’s examine whether we are getting good results. Note: best practice is to use a training and a test data set for this which we’ve omitted here for simplicity", "_____no_output_____" ] ], [ [ "score(data_healthy_scaled)", "_____no_output_____" ], [ "score(data_broken_scaled)", "_____no_output_____" ] ], [ [ "### Submission\n\nIn case you feel confident that everything works as it should (getting values close to one for the healthy and close to zero for the broken case) you can make sure that the secret variable contains a valid submission token and submit your work to the grader\n", "_____no_output_____" ] ], [ [ "parts_data = {}\nparts_data[\"0dXlH\"] = json.dumps({\"number_of_neurons_layer1\": number_of_neurons_layer1, \"number_of_neurons_layer2\": number_of_neurons_layer2, \"number_of_neurons_layer3\": number_of_neurons_layer3, \"number_of_epochs\": number_of_epochs})\nparts_data[\"O5cR9\"] = json.dumps({\"dim\": dim, \"samples\": samples})\n\n \n \ntoken = #### your code here ###", "_____no_output_____" ], [ "prediction = str(np.sum(score(data_healthy_scaled))/3)\nmyData={'healthy' : prediction}\nmyData\nparts_data[\"ZzEP8\"] = json.dumps(myData)\nsubmitAll(email, token, key, parts_data)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
cbfdf6fc515ca6dd7bae5fdb7148cd00e72cc98f
585,852
ipynb
Jupyter Notebook
notebooks/allen_smFISH.ipynb
Xiaojieqiu/starfish
426480fcfeda4b8b1eb9371a818ff20275ac898d
[ "MIT" ]
1
2018-10-07T03:53:43.000Z
2018-10-07T03:53:43.000Z
notebooks/allen_smFISH.ipynb
Xiaojieqiu/starfish
426480fcfeda4b8b1eb9371a818ff20275ac898d
[ "MIT" ]
null
null
null
notebooks/allen_smFISH.ipynb
Xiaojieqiu/starfish
426480fcfeda4b8b1eb9371a818ff20275ac898d
[ "MIT" ]
null
null
null
1,167.035857
569,400
0.957192
[ [ [ "# Reproduce Allen smFISH results with Starfish\n\nThis notebook walks through a work flow that reproduces the smFISH result for one field of view using the starfish package. ", "_____no_output_____" ] ], [ [ "from copy import deepcopy\nfrom glob import glob\nimport json\nimport os\n\n%matplotlib inline\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nfrom scipy import ndimage as ndi\nfrom scipy import stats\nfrom skimage import (exposure, feature, filters, io, measure,\n morphology, restoration, segmentation, transform,\n util, img_as_float)\n\nfrom starfish.io import Stack\nfrom starfish.constants import Indices", "_____no_output_____" ], [ "# # developer note: for rapid iteration, it may be better to run this cell, download the data once, and load \n# # the data from the local disk. If so, uncomment this cell and run this instead of the above. \n# !aws s3 sync s3://czi.starfish.data.public/20180606/allen_smFISH ./allen_smFISH\n# experiment_json = os.path.abspath(\"./allen_smFISH/fov_001/experiment.json\")", "_____no_output_____" ], [ "# this is a large (1.1GB) FOV, so the download may take some time\nexperiment_json = 'https://dmf0bdeheu4zf.cloudfront.net/20180606/allen_smFISH/fov_001/experiment.json'", "_____no_output_____" ] ], [ [ "Load the Stack object, which while not well-named right now, should be thought of as an access point to an \"ImageDataSet\". In practice, we expect the Stack object or something similar to it to be an access point for _multiple_ fields of view. In practice, the thing we talk about as a \"TileSet\" is the `Stack.image` object. The data are currently stored in-memory in a `numpy.ndarray`, and that is where most of our operations are done. \n\nThe numpy array can be accessed through Stack.image.numpy\\_array (public method, read only) or Stack.image.\\_data (read and write)", "_____no_output_____" ] ], [ [ "codebook = pd.read_json('https://dmf0bdeheu4zf.cloudfront.net/20180606/allen_smFISH/fov_001/codebook.json')\ncodebook", "_____no_output_____" ] ], [ [ "We're ready now to load the experiment into starfish (This experiment is big, it takes a few minutes):", "_____no_output_____" ] ], [ [ "s = Stack()\ns.read(experiment_json)", "_____no_output_____" ] ], [ [ "All of our implemented operations leverage the `Stack.image.apply` method to apply a single function over each of the tiles or volumes in the FOV, depending on whether the method accepts a 2d or 3d array. Below, we're clipping each image independently at the 10th percentile. I've placed the imports next to the methods so that you can easily locate the code, should you want to look under the hood and understand what parameters have been chosen. \n\nThe verbose flag for our apply loops could use a bit more refinement. We should be able to tell it how many images it needs to process from looking at the image stack, but for now it's dumb so just reports the number of tiles or volumes it's processed. This FOV has 102 images over 3 volumes. ", "_____no_output_____" ] ], [ [ "from starfish.pipeline.filter import Filter\ns_clip = Filter.Clip(p_min=10, p_max=100, verbose=True)\ns_clip.filter(s.image)", "102it [00:05, 20.09it/s]\n" ] ], [ [ "We're still working through the backing of the Stack.image object with the on-disk or on-cloud Tile spec. As a result, most of our methods work in-place. For now, we can hack around this by deepcopying the data before administering the operation. 
This notebook was developed on a 64gb workstation, so be aware of the memory usage when copying!", "_____no_output_____" ] ], [ [ "# filtered_backup = deepcopy(s)", "_____no_output_____" ] ], [ [ "If you ever want to visualize the image in the notebook, we've added a widget to do that. The first parameter is an indices dict that specifies which hybridization round, channel, z-slice you want to view. The result is a pageable visualization across that arbitrary set of slices. Below I'm visualizing the first channel, which your codebook tells me is Nmnt. \n\n[N.B. once you click on the slider, you can page with the arrow keys on the keyboard.]", "_____no_output_____" ] ], [ [ "s.image.show_stack({Indices.CH: 0});", "Rescaling ...\n" ], [ "s_bandpass = Filter.Bandpass(lshort=0.5, llong=7, threshold=None, truncate=4, verbose=True)\ns_bandpass.filter(s.image)", "102it [00:26, 3.83it/s]\n" ] ], [ [ "For bandpass, there's a point where things get weird, at `c == 0; z <= 14`. In that range the images look mostly like noise. However, _above_ that, they look great + background subtracted! The later stages of the pipeline appear robust to this, though, as no spots are called for the noisy sections. ", "_____no_output_____" ] ], [ [ "# I wasn't sure if this clipping was supposed to be by volume or tile. I've done tile here, but it can be easily\n# switched to volume. \ns_clip = Filter.Clip(p_min=10, p_max=100, is_volume=False, verbose=True)\ns_clip.filter(s.image)", "102it [00:05, 20.12it/s]\n" ], [ "sigma=(1, 0, 0) # filter only in z, do nothing in x, y\nglp = Filter.GaussianLowPass(sigma=sigma, is_volume=True, verbose=True)\nglp.filter(s.image)", "3it [00:23, 8.00s/it]\n" ] ], [ [ "Below, because spot finding is so slow when single-plex, we'll pilot this on a max projection to show that the parameters work. Here's what trackpy.locate, which we wrap, produces for a z-projection of channel 1. To do use our plotting methods on z-projections we have to expose some of the starfish internals, which will be improved upon. ", "_____no_output_____" ] ], [ [ "from showit import image\nfrom trackpy import locate\n\n# grab a section from the tensor. \nch1 = s.image.max_proj(Indices.Z)[0, 1]\n\nresults = locate(ch1, diameter=3, minmass=250, maxsize=3, separation=5, preprocess=False, percentile=10) \nresults.columns = ['x', 'y', 'intensity', 'r', 'eccentricity', 'signal', 'raw_mass', 'ep']", "_____no_output_____" ], [ "# plot the z-projection\nf, ax = plt.subplots(figsize=(20, 20))\nax.imshow(ch1, vmin=15, vmax=52, cmap=plt.cm.gray)\n\n# draw called spots on top as red circles\n# scale radius plots the red circle at scale_radius * spot radius\ns.image._show_spots(results, ax=plt.gca(), scale_radius=7)", "_____no_output_____" ] ], [ [ "Below spot finding is on the _volumes_ for each channel. This will take about `11m30s`", "_____no_output_____" ] ], [ [ "from starfish.pipeline.features.spots.detector import SpotFinder\n\n# I've guessed at these parameters from the allen_smFISH code, but you might want to tweak these a bit. \n# as you can see, this function takes a while. It will be great to parallelize this. That's also coming, \n# although we haven't figured out where it fits in the priority list. 
\nkwargs = dict(\n spot_diameter=3, # must be odd integer\n min_mass=300,\n max_size=3, # this is max _radius_\n separation=5,\n noise_size=0.65, # this is not used because preprocess is False\n preprocess=False,\n percentile=10, # this is irrelevant when min_mass, spot_diameter, and max_size are set properly\n verbose=True,\n is_volume=True,\n)\nlmpf = SpotFinder.LocalMaxPeakFinder(**kwargs)\nspot_attributes = lmpf.find(s.image)", "3it [12:00, 234.46s/it]\n" ], [ "# save the results to disk as json\nfor attrs, (hyb, ch) in spot_attributes:\n attrs.save(f'spot_attributes_c{ch.value}.json')", "_____no_output_____" ], [ "# # if you want to load them back in the same shape, here's how:\n# from starfish.pipeline.features.spot_attributes import SpotAttributes\n# spot_attributes = [SpotAttributes.load(attrs) for attrs in glob('spot_attributes_c*.json')]", "_____no_output_____" ], [ "# this is not a very performant function because of how matplotlib renders circles as individual artists, \n# but I think it's useful for debugging the spot detection.\n\n# Note that in places where spots are \"missed\" it is often because they've been localized to individual \n# nearby z-planes, whereas most spots exist across several layers of z.\n\ns.image.show_stack({Indices.CH: 1, Indices.HYB: 0}, show_spots=spot_attributes[1][0], figure_size=(20, 20), p_min=60, p_max=99.9);", "Rescaling ...\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ] ]
cbfe0ef44a40d2fa9fbd4323f7a5f42bd4ab992c
155,224
ipynb
Jupyter Notebook
brain_classification/notebook.ipynb
Gloriel621/cs231n
4dc99fdadc44cc51b9a85e05a5b5c2d3468fadbf
[ "MIT" ]
null
null
null
brain_classification/notebook.ipynb
Gloriel621/cs231n
4dc99fdadc44cc51b9a85e05a5b5c2d3468fadbf
[ "MIT" ]
null
null
null
brain_classification/notebook.ipynb
Gloriel621/cs231n
4dc99fdadc44cc51b9a85e05a5b5c2d3468fadbf
[ "MIT" ]
null
null
null
245.219589
51,642
0.907514
[ [ [ "import numpy as np \nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport os\nimport PIL\nimport skimage as sk\nimport random", "_____no_output_____" ], [ "from PIL import Image\ndef prepare_dataset(path):\n #declare arrays\n x=[]\n y=[]\n \n # 이미지와 label을 리스트에 넣기\n data_folders = os.listdir(path)\n for folder in data_folders:\n full_path = os.path.join(path,folder)\n for img in os.listdir(full_path):\n image = Image.open(os.path.join(full_path,img)).convert('L') # 이미지를 그레이스케일로 변환\n image = image.resize((224,224),Image.ANTIALIAS)\n x.append(np.asarray(image))\n # 증상이 없으면 0, 있으면 1\n if('non' in full_path): \n y.append(0)\n else:\n y.append(1)\n \n x = np.asarray(x)\n y = np.asarray(y)\n \n \n return (x,y)\n \n\n(x_train,y_train) = prepare_dataset('Desktop/dataset/training_set/')\n(x_test,y_test) = prepare_dataset('Desktop/dataset/test_set/')\n(x_validation,y_validation) = prepare_dataset('Desktop/dataset/validation_set/')\n\n# 이미지의 shape\nprint(\"Shape of x_train {}\\nShape of x_test{}\\nShape of x_validation{}\".format(x_train.shape,x_test.shape,x_validation.shape))", "Shape of x_train (140, 224, 224)\nShape of x_test(20, 224, 224)\nShape of x_validation(40, 224, 224)\n" ], [ "# train 데이터셋에서 랜덤 이미지 선택해서 보여주기\ndef random_example(x,y,rows,cols):\n row = 0\n col = 0\n f, axarr = plt.subplots(rows,cols)\n\n for i in range(3):\n for k in range(2): \n rnd = random.randint(0,len(x))\n axarr[row,col].imshow(x[rnd],cmap='gray')\n if(y is not None):\n axarr[row,col].set_title(\"Has Tumor\" if y[rnd] == 1 else \"No Tumor\")\n col += 1\n col = 0\n row += 1\n\n f.tight_layout(pad=0.9,h_pad=2.0)\n\n plt.show()\n \nrandom_example(x_train,None,3,2)", "_____no_output_____" ], [ "# 이미지 변형을 통한 데이터 증폭\nclass Augmentation:\n def __init__(self):\n pass\n \n def random_rotation(self,data,label):\n # 왼쪽으로 25도, 오른쪽으로 25도 사이의 무작위 각도로 이미지 회전\n augmented_images = []\n augmented_label = []\n random_degree = random.uniform(-25, 25)\n counter = 0\n for img in data:\n img = sk.transform.rotate(img, random_degree)\n augmented_images.append(img)\n augmented_label.append(label[counter])\n counter += 1\n return (augmented_images,augmented_label)\n \n \n def random_noise(self,data,label):\n # 이미지에 무작위 노이즈 추가\n augmented_images = []\n augmented_label = []\n counter = 0\n for img in data:\n img = sk.util.random_noise(img)\n augmented_images.append(img)\n augmented_label.append(label[counter])\n counter += 1\n \n return (augmented_images,augmented_label)\n\n def horizontal_flip(self,data,label):\n # 이미지 좌우반전\n counter = 0\n augmented_images = []\n augmented_label = []\n for img in data:\n img = img[:, ::-1]\n augmented_images.append(img)\n augmented_label.append(label[counter])\n counter += 1\n return (augmented_images,augmented_label)\n \n def vertical_flip(self,data,label):\n # 이미지 상하반전\n counter = 0\n augmented_images = []\n augmented_label = []\n for img in data:\n img = np.flip(img)\n augmented_images.append(img)\n augmented_label.append(label[counter])\n counter += 1\n return (augmented_images,augmented_label)\n \n \n\nAUG = Augmentation()\n\n(x_noise,y_noise) = AUG.random_noise(x_train,y_train)\n(x_h_flipped,y_h_flipped) = AUG.horizontal_flip(x_train,y_train)\n(x_v_flipped,y_v_flipped) = AUG.vertical_flip(x_train,y_train)\n(x_rotated,y_rotated) = AUG.random_rotation(x_train,y_train)", "_____no_output_____" ], [ "# 노이즈 추가/상하좌우반전/로테이션한 데이터 합치기\n\nx_noise = np.asarray(x_noise)\nx_h_flipped = np.asarray(x_h_flipped)\nx_v_flipped = np.asarray(x_v_flipped)\nx_rotated = np.asarray(x_rotated)\n\nx_train = 
np.concatenate((x_train,x_noise,x_h_flipped,x_v_flipped,x_rotated),axis=0)\n\n#----------------------------------------------------------------------------------------------------------------------------------------------------------------\n\ny_noise = np.asarray(y_noise)\ny_h_flipped = np.asarray(y_h_flipped)\ny_v_flipped = np.asarray(y_v_flipped)\ny_rotated = np.asarray(y_rotated)\n\ny_train = np.concatenate((y_train,y_noise,y_h_flipped,y_v_flipped,y_rotated),axis=0)\n\nrandom_example(x_train,y_train,3,2)", "_____no_output_____" ], [ "# 텐서로 바꾸기\nimport torch\n\nx_train = torch.from_numpy(x_train)\nx_test = torch.from_numpy(x_test)\n\ny_train = torch.from_numpy(y_train)\ny_test = torch.from_numpy(y_test)\n\n\ntrain = torch.utils.data.TensorDataset(x_train,y_train) \ntrain_loader = torch.utils.data.DataLoader(train,batch_size=4,shuffle=True) \n\ntest = torch.utils.data.TensorDataset(x_test,y_test)\ntest_loader = torch.utils.data.DataLoader(test,batch_size=4,shuffle=False)", "_____no_output_____" ], [ "import torch.nn as nn\nimport torch.nn.functional as F\nimport torch.utils.data\n\n# Conv 레이어 함수 정의\n\ndef conv3x3(in_planes,out_planes,stride=1):\n return nn.Conv2d(in_planes,out_planes,kernel_size=3,stride=stride,padding=1,bias=False)\n\ndef conv1x1(in_planes,out_planes,stride=1):\n return nn.Conv2d(in_planes,out_planes,kernel_size=1,stride=stride,bias=False)\n\n# ResNet에 사용하는 단위 Block을 정의\nclass BasicBlock(nn.Module): \n\n expansion = 1\n\n def __init__(self,inplanes,planes,stride=1,downsample=None):\n super(BasicBlock,self).__init__()\n self.conv1 = conv3x3(inplanes,planes,stride)\n self.bn1 = nn.BatchNorm2d(planes)\n self.relu = nn.ReLU(inplace=True)\n self.drop = nn.Dropout(0.5)\n self.conv2 = conv3x3(planes,planes)\n self.bn2 = nn.BatchNorm2d(planes)\n self.downsample = downsample\n self.stride = stride\n\n def forward(self,x):\n identity = x\n out = self.conv1(x)\n out = self.bn1(out)\n out= self.relu(out)\n out = self.drop(out)\n out = self.conv2(out)\n out = self.bn2(out)\n out = self.drop(out)\n\n if(self.downsample is not None):\n identity = self.downsample(x)\n out += identity \n out = self.relu(out)\n\n return out", "_____no_output_____" ], [ "num_classes = 2\n# 모델로 ResNet 18 사용\nclass ResNet(nn.Module):\n\n def __init__(self,block,layers,num_classes=num_classes):\n super(ResNet,self).__init__()\n self.inplanes = 64 # according to research paper\n self.conv1 = nn.Conv2d(1,64,kernel_size=7,stride=2,padding=3,bias=False)\n self.bn1 = nn.BatchNorm2d(64)\n self.relu = nn.ReLU(inplace=True)\n self.maxpool = nn.MaxPool2d(kernel_size = 3,stride=2,padding=1)\n self.layer1 = self._make_layer(block,64,layers[0],stride=1)\n self.layer2 = self._make_layer(block,128,layers[1],stride=2)\n self.layer3 = self._make_layer(block,256,layers[2],stride=2)\n self.layer4 = self._make_layer(block,512,layers[3],stride=2)\n \n self.avgpooling = nn.AdaptiveAvgPool2d((1,1))\n self.fc = nn.Linear(512*block.expansion,num_classes)\n\n for m in self.modules(): \n if isinstance(m,nn.Conv2d):\n nn.init.kaiming_normal_(m.weight,mode=\"fan_out\",nonlinearity=\"relu\") \n elif isinstance(m,nn.BatchNorm2d):\n nn.init.constant_(m.weight,1)\n nn.init.constant_(m.bias, 0)\n\n\n def _make_layer(self,block,planes,num_layers,stride = 1):\n downsample = None\n if stride!=1 or self.inplanes != planes*block.expansion:\n downsample = nn.Sequential(\n conv1x1(self.inplanes,planes*block.expansion,stride),\n nn.BatchNorm2d(planes*block.expansion)\n )\n layers = []\n layers.append(block(self.inplanes,planes,stride,downsample))\n 
self.inplanes = planes*block.expansion\n for _ in range(1,len(layers)):\n layers.append(block(self.inplanes,planes))\n\n return nn.Sequential(*layers)\n\n def forward(self,x):\n x= self.conv1(x)\n x = self.bn1(x)\n x = self.relu(x)\n x=self.maxpool(x)\n\n x = self.layer1(x)\n x = self.layer2(x)\n x = self.layer3(x)\n x = self.layer4(x)\n\n x = self.avgpooling(x)\n\n x = x.view(x.size(0),-1) #flatten\n x = self.fc(x)\n\n return x", "_____no_output_____" ], [ "# resnet 50\n# model = ResNet(BottleNeck,[3,4,6,3])\n\n# resnet 18\nmodel = ResNet(BasicBlock,[2,2,2,2],num_classes=2)\nmodel.cuda()\ndevice = torch.device(\"cuda\")\n\ncriterion = nn.CrossEntropyLoss()\noptimizer = torch.optim.Adam(model.parameters(),0.0001) # 0.0001은 학습률\n\ntotal_step = len(train_loader)\nloss_list = []\ntrain_acc = []\ntest_acc = []\nbatch_size = 4\nfor epoch in range(300):\n for i,data in enumerate(train_loader,0):\n # i --->index , data ----> image\n inputs,labels= data\n try:\n inputs = inputs.view(batch_size,1,224,224)\n inputs = inputs.float()\n except:\n continue\n \n \n if torch.cuda.is_available():\n inputs, labels = inputs.to(device),labels.to(device, dtype = torch.long)\n\n #zero gradient\n optimizer.zero_grad()\n\n # forward\n outputs = model(inputs)\n # output과 label을 비교하여 loss 계산\n loss = criterion(outputs,labels) \n # backward\n loss.backward()\n # update weigths\n optimizer.step()\n if(i==(len(x_train)/batch_size)-1 and epoch%10 == 0):\n print(\"Epoch : {}\".format(epoch))\n \n correct = 0\n total = 0\n with torch.no_grad():\n for data in train_loader:\n images,labels = data\n try:\n images = images.view(batch_size,1,224,224)\n images = images.float()\n except:\n continue # 데이터셋 에러 처리\n \n if torch.cuda.is_available():\n images, labels = images.to(device), labels.to(device)\n\n outputs = model(images)\n _,predicted = torch.max(outputs.data,1) # max value index 반환\n total += labels.size(0)\n correct += (predicted == labels).sum().item()\n\n train_acc.append((100*correct/total))\n loss_list.append(loss.item())\n if(epoch % 10 == 0):\n print(\"Accuracy train: \",(100*correct/total))", "Epoch : 0\nAccuracy train: 59.42857142857143\nEpoch : 10\nAccuracy train: 64.71428571428571\nEpoch : 20\nAccuracy train: 69.57142857142857\nEpoch : 30\nAccuracy train: 73.85714285714286\nEpoch : 40\nAccuracy train: 74.14285714285714\nEpoch : 50\nAccuracy train: 78.57142857142857\nEpoch : 60\nAccuracy train: 76.57142857142857\nEpoch : 70\nAccuracy train: 76.42857142857143\nEpoch : 80\nAccuracy train: 78.57142857142857\nEpoch : 90\nAccuracy train: 79.85714285714286\nEpoch : 100\nAccuracy train: 81.0\nEpoch : 110\nAccuracy train: 78.71428571428571\nEpoch : 120\nAccuracy train: 82.42857142857143\nEpoch : 130\nAccuracy train: 81.14285714285714\nEpoch : 140\nAccuracy train: 82.14285714285714\nEpoch : 150\nAccuracy train: 83.14285714285714\nEpoch : 160\nAccuracy train: 83.28571428571429\nEpoch : 170\nAccuracy train: 86.0\nEpoch : 180\nAccuracy train: 82.28571428571429\nEpoch : 190\nAccuracy train: 83.14285714285714\nEpoch : 200\nAccuracy train: 82.85714285714286\nEpoch : 210\nAccuracy train: 81.42857142857143\nEpoch : 220\nAccuracy train: 84.14285714285714\nEpoch : 230\nAccuracy train: 85.0\nEpoch : 240\nAccuracy train: 86.14285714285714\nEpoch : 250\nAccuracy train: 85.0\nEpoch : 260\nAccuracy train: 85.85714285714286\nEpoch : 270\nAccuracy train: 82.85714285714286\nEpoch : 280\nAccuracy train: 83.71428571428571\nEpoch : 290\nAccuracy train: 86.28571428571429\n" ], [ "plt.subplot(2, 1, 
1)\nplt.plot(loss_list)\nplt.title(\"Loss\")\n\nplt.subplot(2, 1, 2)\nplt.plot(np.array(train_acc)/100,label=\"Train Accuracy\",color='green')\nplt.title(\"Train Accuracy\")\n\nplt.tight_layout(pad=0.9,h_pad=2.0)\n\nplt.show()", "_____no_output_____" ], [ "correct = 0\ntotal = 0\nwith torch.no_grad():\n for data in test_loader:\n images,labels = data\n try:\n images = images.view(batch_size,1,224,224)\n images = images.float()\n except:\n continue\n\n if torch.cuda.is_available():\n images, labels = images.to(device), labels.to(device)\n\n outputs = model(images)\n _,predicted = torch.max(outputs.data,1)\n total += labels.size(0)\n correct += (predicted == labels).sum().item()\n test_acc.append((100*correct/total))\n\nprint(\"Test Accuracy: \",sum(test_acc)/len(test_acc))", "Test Accuracy: 68.75\n" ], [ "torch.save(model, \"brain_resnet18.pt\")", "_____no_output_____" ], [ "# 정확도가 그리 높지는 않음\n# 데이터셋 추가 확보 필요", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbfe1041524af3b0dde828cb9c63ce99138eb365
6,543
ipynb
Jupyter Notebook
Assignments_2.ipynb
bipsec/Assignments
914a7016da1be707df3db6afa8da4bc1f697aaac
[ "MIT" ]
null
null
null
Assignments_2.ipynb
bipsec/Assignments
914a7016da1be707df3db6afa8da4bc1f697aaac
[ "MIT" ]
null
null
null
Assignments_2.ipynb
bipsec/Assignments
914a7016da1be707df3db6afa8da4bc1f697aaac
[ "MIT" ]
null
null
null
23.535971
97
0.405777
[ [ [ "#Answer of Question 1:\na = True\nb = False\ntype(a) , type(b)\n", "_____no_output_____" ], [ "#Answer of Question 2:\nand or not", "_____no_output_____" ], [ "# Answer of Question 3:\nTrue and True = True\nTrue and False = False\nFalse and True = False\nFalse and False = False\nTrue or True = True\nTrue or False = True\nFalse or True = True\nFalse or False = False\nnot True = False\nnot False = False", "_____no_output_____" ], [ "#Answer of Question 4:\n\n(5>4) and (3==5) ==> False\nnot (5>4) ==> False\n(5>4) or (3==5) ==> True\nnot (((5>4)) or (3==5)) ==> False\n(True and True) and (True == False) ==> False\n(not False) or (not True) ==> True", "_____no_output_____" ], [ "#Answer of Question 5:\n==, !=, >,<,>=,<=", "_____no_output_____" ], [ "#Answer of Question 6:\n\na = 5 # this equal refers to assinging values\nb = 5\n\nif a == b: ## here double equal is used for comparison\n print(\"Yes they are\")", "_____no_output_____" ], [ "#Answer of Question 7:\nspam = 0\n\nif spam ==10:\n print('eggs') ### block 1\nif spam > 5:\n print('bacon') ### block 2\nelse:\n print('ham') ### block 3 \n print('spam')\n print('spam')", "_____no_output_____" ], [ "#Answer of Question 8:\n\nspam = int(input())\n\nif spam == 1:\n print('Hello')\nelif spam == 2:\n print('Howdy')\nelse:\n print('Greetings!')", "_____no_output_____" ], [ "#Answer of Question 9:\nwhile True:\n print(\"Hi\")\n\n CTRL + C", "_____no_output_____" ], [ "#Answer of Question 10:\n'''\nbreak = if a condition is True, then 'break' breaks the loops and comes out of it\ncontinue = if a condition is True, continue just skips it and goes to the next step.\n'''", "_____no_output_____" ], [ "#Answer of Question 11:\n\nrange(10): starts: 0 and goes upto 9 excluding 10 having the step size 1.\nrange(0,10): start: 0 , end: 10 , excluding 10, step size = 1\nrange(0,10,1): same as above.\n", "_____no_output_____" ], [ "#Answer of Question 12:\nfor i in range(1,11):\n print(i)\n\n\n\nn = 1\nwhile n<11:\n print(n)\n n+=1", "_____no_output_____" ], [ "#Answer of Question 13:\nclass Spam:\n def __init__(self,hotdog,sandwitch):\n self.hotdog = hotdog\n self.sandwitch = sandwitch\n def bacon(self):\n if self.hotdog == \"Hot\":\n print(\"Wait a bit to have it! It can burn your mouth\")\n\n\nspam = Spam(\"Hot\",'overpriced')\n", "_____no_output_____" ], [ "spam.bacon() ## calling the function bacon :)", "Wait a bit to have it! It can burn your mouth\n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbfe1236f7523f3d484ad04ee311023425d9cc1d
104,401
ipynb
Jupyter Notebook
Moringa_Data_Science_Prep_W2_Independent_Project_2021_05_Lucy_Kinyua_SQL_Notebook.ipynb
LucyKinyua/Week2_MS
64ba8e6cb986de38debde5b96987e7cc88c27395
[ "MIT" ]
null
null
null
Moringa_Data_Science_Prep_W2_Independent_Project_2021_05_Lucy_Kinyua_SQL_Notebook.ipynb
LucyKinyua/Week2_MS
64ba8e6cb986de38debde5b96987e7cc88c27395
[ "MIT" ]
null
null
null
Moringa_Data_Science_Prep_W2_Independent_Project_2021_05_Lucy_Kinyua_SQL_Notebook.ipynb
LucyKinyua/Week2_MS
64ba8e6cb986de38debde5b96987e7cc88c27395
[ "MIT" ]
null
null
null
37.140164
471
0.293503
[ [ [ "<a href=\"https://colab.research.google.com/github/LucyKinyua/Week2_MS/blob/main/Moringa_Data_Science_Prep_W2_Independent_Project_2021_05_Lucy_Kinyua_SQL_Notebook.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "# Overview", "_____no_output_____" ], [ "In this part of the assessment, you will act as a Data analyst to answer a research question on the US elections. \n\nFirst, will be required to write a Data Report on the process that will undertake while working on the given research problem. Using the CRISP-DM Methodology, you will document the various stages of the data science lifecycle for the given research problem while also providing your recommendation.\n\nYou have been provided with a detailed description of what you will be expected to do below.\n\nDo remember that complex tasks that may seem hard at first can often be broken down into a sequence of simple tasks, and there are workarounds to do what first seems “impossible” with a succession of known operations.", "_____no_output_____" ], [ "**Problem Description**\n\nYou have been asked to help a candidate to become US president. The winner of the election will be the candidate winning the most grand electors. \n\nGrand electors are attributed at the state level: in each of the 51 states, there is a given number of grand electors to win (roughly, but not exactly, proportional to the size of the state) and the presidential candidate receiving the most local votes wins ALL the Grand Electors in that state.\n\nBecause the number of grand electors is not exactly proportional to the population, some states can be prioritized to increase the return on investment of the campaign. We assume here there are only 2 candidates, and no history (no trend of certain states to vote for a particular candidate or party). Hence, each vote is equally \"expensive\" to get, but some states grant more grand elector per capita.\n\nYou are provided with 2 tables: one giving the number of Grand Electors per state, the other the population per state.\n\n- Grand Electors by State\n\n- Population by State\n\n\nYou are asked to identify the states that should be prioritized to win the election, with a smart but simple algorithm (brute force computation of all possibilities to find the absolute optimum is not accepted, as it would be to computationally expensive). It is ok not to find the overall optimum, we just want a strategy that makes sense.\n\n(This is of course a very simplistic view of reality, but it is a nice starting point to play with data and analyze possibilities).\n\nFirst take a few minutes to think about what you need to do. Steps will be suggested hereafter, but a big part of the data scientist's job is to identify the flow of operations when being asked a practical question, so it is important you start exercising on that in addition to programming.\n\nHere is what we are suggesting to do: we will rank states by decreasing number of grand electors per capita. The first states in the list will be the most valuable (you get a large number of grand electors by convincing a small number of people to vote for you). 
We will target all the states at the top of the list until the cumulative sum (also called running total) of grand electors won is larger than half the total number of Grand Electors in the country.", "_____no_output_____" ], [ "**Instructions**\n\nTo do that, we need (you are allowed to create as many intermediary tables as you want, to keep queries short):\n\n1. To join the 2 tables:\n- You notice States are not capitalized the same way in both tables (one is in uppercase letters, the other not), so you will first need to convert all to uppercase, for instance.\n- Now you can join the tables on the state key.\n2. Your boss wants you to change the name of the \"District of Columbia\" state to its short version \"DC\". Please do that.\n3. To compute the ratio between the number of grand electors and the population. Please create a new column with that ratio.\n4. To order the states by decreasing ratio of Grand Electors per capita. That will make our priority list.\n5. To compute the running total of Grand Electors in that sorted list.\n- Hint: you can get inspiration from here to compute a running total from here: https://stackoverflow.com/questions/21382766/cumulative-summing-values-in-sqlite\n6. Independently, to compute the half of the total of Grand Electors overall (in the whole country):\n- This is the threshold we need to reach for winning the presidential election.\n7. To filter our sorted list of states in order to keep only the (top) ones enabling us to reach the computed threshold. (the other states can be ignored). That is our target list.\n- Hint: You can do that in 2 steps:\n - Select all the states for which the running total is below or equal to the threshold.\n - Add the first state for which the running total is larger than the threshold.\n\nCan you draw some conclusions from the result? Is it in line with your expectations? How many states do you end up with in the target list? Is it a small or a large number? 
Do you think it would be a good recommendation to target those states?", "_____no_output_____" ], [ "# Connecting to database and importing data from CSV files", "_____no_output_____" ] ], [ [ "# Loading an sql extension to allow me to work with sql on Colaboratory;\n# \n%load_ext sql\n\n# Connecting to the in memory sqlite database;\n# \n%sql sqlite://", "_____no_output_____" ], [ "# Importing the python csv library to allow me to read csv file(s) that will have uploaded to this environment;\n# \nimport csv\n\n# Importing the pandas library to use for data manipulation in this notebook;\n# \nimport pandas as pd", "_____no_output_____" ], [ "# Uploading the Grand Electors csv file;\n# \nwith open('GrandElectors_by_state.csv','r') as f:\n GrandElectors_by_state = pd.read_csv(f, index_col=0, encoding='utf-8')\n\n%sql PERSIST GrandElectors_by_state;", " * sqlite://\n" ], [ "# Previewing the Grand_Electors table;\n# \n%%sql\nSELECT * FROM GrandElectors_by_state\nLIMIT 10;", " * sqlite://\nDone.\n" ], [ "# Uploading the Population csv file;\n# \nwith open('Population_by_state.csv','r') as f:\n Population_by_state = pd.read_csv(f, index_col=0, encoding='utf-8')\n\n%sql PERSIST Population_by_state;", " * sqlite://\n" ], [ "# Previewing the Population table;\n# \n%%sql\nSELECT * FROM Population_by_state\nLIMIT 10;", " * sqlite://\nDone.\n" ] ], [ [ "# Data Preparation", "_____no_output_____" ] ], [ [ "# Instruction 1:\n# Notice States are not capitalized the same way in both tables\n# Converting States in GrandElectors_by_state table to UPPERCASE;\n# \n%%sql\nUPDATE GrandElectors_by_state SET State = upper(State);\n\nSELECT * FROM GrandElectors_by_state;", " * sqlite://\n51 rows affected.\nDone.\n" ], [ "# Previewing the Grand_Electors table to confirm the update has taken effect;\n# \n%%sql\nSELECT * FROM GrandElectors_by_state LIMIT 10;", " * sqlite://\nDone.\n" ], [ "# Counting the number of States in the Grand_Electors table;\n# \n%%sql\nSELECT COUNT(State) FROM GrandElectors_by_state;", " * sqlite://\nDone.\n" ], [ "# Counting the number of States in the Population table;\n# \n%%sql\nSELECT COUNT(State) FROM Population_by_state;", " * sqlite://\nDone.\n" ], [ "# Notice that the number of States are not the same\n# This will be corrected after using the INNER JOIN function\n# The records from table one and table two would both be returned,...\n# ... 
but only if the values in column one of table one match the values in column one of table two.\n# Any records that do not have matching values would not be returned by an INNER JOIN.\n", "_____no_output_____" ], [ "# Joining both tables;\n# \n%%sql\nCREATE TABLE if not exists new_table AS SELECT STATE, POPULATION, GRAND_ELECTORS FROM\n(SELECT\nGrandElectors_by_state.State AS STATE,\nPopulation_by_state.Population AS POPULATION,\nGrandElectors_by_state.GrandElectors AS GRAND_ELECTORS\nFROM GrandElectors_by_state\nINNER JOIN Population_by_state ON GrandElectors_by_state.State = Population_by_state.State);\n\nSELECT * FROM new_table;", " * sqlite://\nDone.\nDone.\n" ], [ "# Previewing the new joint table to confirm the update has taken effect;\n# \n%%sql\nSELECT * FROM new_table;", " * sqlite://\nDone.\n" ] ], [ [ "# Modelling", "_____no_output_____" ] ], [ [ "# Instruction 2:\n# Changing the name of the \"District of Columbia\" State to its short version \"DC\".\n# \n%%sql\nUPDATE new_table\nSET STATE = \"DC\"\nWHERE STATE = \"DISTRICT OF COLUMBIA\";\n\nSELECT * FROM new_table;", " * sqlite://\n1 rows affected.\nDone.\n" ], [ "# Instruction 3:\n# To compute the ratio between the number of grand electors and the population.\n# Create a new column with that ratio.\n# \n# Instruction 4:\n# To order the states by decreasing ratio of Grand Electors per capita. That will make our priority list.\n# \n%%sql\nSELECT STATE, POPULATION, GRAND_ELECTORS,\n(POPULATION/GRAND_ELECTORS) AS \"POPULATION FOR EVERY 1 GRAND ELECTOR\"\nFROM new_table\nORDER BY \"POPULATION FOR EVERY 1 GRAND ELECTOR\" DESC;", " * sqlite://\nDone.\n" ], [ "# Instruction 5:\n# To compute the running total of Grand Electors in that sorted list.\n# \n%%sql\n", "_____no_output_____" ], [ "%%sql\nSELECT STATE, POPULATION, GRAND_ELECTORS, \"POPULATION FOR EVERY 1 GRAND ELECTOR\",\n SUM(GRAND_ELECTORS) \n OVER (PARTITION BY STATE ORDER BY \"POPULATION FOR EVERY 1 GRAND ELECTOR\" DESC)\n AS \"POPULATION FOR EVERY 1 GRAND ELECTOR\"\nFROM new_table;", " * sqlite://\n(sqlite3.OperationalError) near \"(\": syntax error\n[SQL: SELECT STATE, POPULATION, GRAND_ELECTORS, \"POPULATION FOR EVERY 1 GRAND ELECTOR\",\n SUM(GRAND_ELECTORS) \n OVER (PARTITION BY STATE ORDER BY \"POPULATION FOR EVERY 1 GRAND ELECTOR\" DESC)\n AS \"POPULATION FOR EVERY 1 GRAND ELECTOR\"\nFROM new_table;]\n(Background on this error at: http://sqlalche.me/e/14/e3q8)\n" ], [ "%%sql\nSELECT\n STATE,\n POPULATION,\n GRAND_ELECTORS,\n \"POPULATION FOR EVERY 1 GRAND ELECTOR\"\nSUM (GRAND_ELECTORS) OVER (ORDER BY \"POPULATION FOR EVERY 1 GRAND ELECTOR\") AS RUNNING_TOTAL\nFROM new_table;", " * sqlite://\n(sqlite3.OperationalError) near \"(\": syntax error\n[SQL: SELECT\n STATE,\n POPULATION,\n GRAND_ELECTORS,\n \"POPULATION FOR EVERY 1 GRAND ELECTOR\"\nSUM (GRAND_ELECTORS) OVER (ORDER BY \"POPULATION FOR EVERY 1 GRAND ELECTOR\") AS RUNNING_TOTAL\nFROM new_table;]\n(Background on this error at: http://sqlalche.me/e/14/e3q8)\n" ], [ "%%sql\nSELECT country, registration_date,registred_users,\n SUM(registred_users) \n OVER (PARTITION BY country ORDER BY registration_date)\n AS total_users\nFROM registration;\n\nSELECT registration_date,registred_users,\n SUM(registred_users) OVER (ORDER BY registration_date)\n AS total_users\nFROM registration;", "_____no_output_____" ], [ "SELECT\n t.Date,\n Sum(r.KeyColumn1),\n Sum(r.KeyColumn2),\n Sum(r.KeyColumn3)\nFROM (SELECT DISTINCT Date FROM MyTable) as t\nLeft Join MyTable as r On (r.Date < t.Date)\nGroup By t.Date;", "_____no_output_____" 
], [ "%%sql \nSELECT SUM(GRAND_ELECTORS)\nFROM new_table;", " * sqlite://\nDone.\n" ], [ "# Instruction 6:\n# Compute half of the total of Grand Electors overall (in the whole country):\n# - This is the threshold we need to reach for winning the presidential election.\n# \n%%sql\nSELECT SUM(GRAND_ELECTORS)", "_____no_output_____" ], [ "# Instruction 7:\n# To filter our sorted list of states in order to keep only the (top) ones enabling us to reach the computed threshold.\n# (the other states can be ignored). That is our target list.\n# Hint: You can do that in 2 steps:\n# - Select all the states for which the running total is below or equal to the threshold.\n# - Add the first state for which the running total is larger than the threshold.\n# \n%%sql\nSELECT\nFROM new_table;", "_____no_output_____" ], [ "", "_____no_output_____" ] ], [ [ "# Evaluation", "_____no_output_____" ] ], [ [ "# Test against success criteria\n# \n", "_____no_output_____" ] ], [ [ "# Deployment", "_____no_output_____" ] ], [ [ "# Conclusion:\n# Can you draw some conclusions from the result?\n# Is it in line with your expectations?\n# How many states do you end up with in the target list?\n# Is it a small or a large number?\n# Do you think it would be a good recommendation to target those states?\n# \n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
cbfe178cdcf551eff7f9b91efd34779665697cee
3,555
ipynb
Jupyter Notebook
kurstag10_Selenium1/10 Selenium, Projekte/01 Selenium Setup.ipynb
Priskawa/kurstag2
028d5b07011d7ddc2b2416aa40b7f94dee134614
[ "MIT" ]
null
null
null
kurstag10_Selenium1/10 Selenium, Projekte/01 Selenium Setup.ipynb
Priskawa/kurstag2
028d5b07011d7ddc2b2416aa40b7f94dee134614
[ "MIT" ]
null
null
null
kurstag10_Selenium1/10 Selenium, Projekte/01 Selenium Setup.ipynb
Priskawa/kurstag2
028d5b07011d7ddc2b2416aa40b7f94dee134614
[ "MIT" ]
null
null
null
23.388158
511
0.588467
[ [ [ "# Selenium Setup", "_____no_output_____" ], [ "Selenium wird eigentlich gebraucht, um Features von Websites zu testen. Aber weil er es erlaubt, mit einer Website zu interagieren, ist das Werkzeug oft die Lösung für alle Scraping-Probleme. Also bevor ihr mit BeautifulSoup schier verzweifelt, gebt Selenium eine Chance. Hier wollen uns zuerst um das Selenium setup kümmern. Die Anleitung scheint einfach. Ihr findet sie [hier](http://selenium-python.readthedocs.io/installation.html). Aber effektiv kann es etwas kompliziert werden, bis das Ganze läuft.", "_____no_output_____" ], [ "## Installation", "_____no_output_____" ], [ "- Am einfachsten ist es zu installieren mit ```!pip install selenium```\n- Und dann brauchen wir noch eine Browser. Ich arbeite jeweils am liebsten mit dem Chrome Browser oder Firefox. Ersteres lässt sich mit ```brew install chromedriver```installieren. Oder mit ```brew install geckodriver```für die Firefox-Variante. Wichtig ist es, sich den Pfad zu merken, der dann am Ende der Brew-Installation ausgespuckt wird. Diesen Pfad brauchen wir, wenn wir den Browser starten.", "_____no_output_____" ], [ "# Testen wir", "_____no_output_____" ] ], [ [ "from selenium import webdriver\nfrom selenium.webdriver.common.keys import Keys", "_____no_output_____" ], [ "driver = webdriver.Chrome('/usr/local/bin/chromedriver')", "_____no_output_____" ], [ "driver.get(\"http://www.python.org\")", "_____no_output_____" ], [ "elem = driver.find_element_by_name(\"q\")", "_____no_output_____" ], [ "elem.clear()", "_____no_output_____" ], [ "elem.send_keys(\"pycon\")", "_____no_output_____" ], [ "elem.send_keys(Keys.RETURN)", "_____no_output_____" ] ], [ [ "Hier bekommt ihr eine gute Übersicht der Python-Community und alle Anlässe. Es sind nicht alle Anlässe. Die Schweizer Anlässe fehlen zum Beispiel. Doch es sind viele dabei.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ] ]
cbfe2852d24058f9964b9361ea62e889b6efc144
262,706
ipynb
Jupyter Notebook
examples-ipynb/gtfs_reader/.ipynb_checkpoints/5-quetzal-importer-checkpoint.ipynb
systragroup/quetzal
bb7934bcae588cddf0f0da810d75114d1c64768f
[ "CECILL-B" ]
25
2018-11-20T16:33:02.000Z
2022-03-03T12:46:52.000Z
examples-ipynb/gtfs_reader/.ipynb_checkpoints/5-quetzal-importer-checkpoint.ipynb
systragroup/quetzal
bb7934bcae588cddf0f0da810d75114d1c64768f
[ "CECILL-B" ]
14
2019-06-13T13:26:20.000Z
2022-01-13T03:51:07.000Z
examples-ipynb/gtfs_reader/.ipynb_checkpoints/5-quetzal-importer-checkpoint.ipynb
systragroup/quetzal
bb7934bcae588cddf0f0da810d75114d1c64768f
[ "CECILL-B" ]
4
2020-01-31T18:34:59.000Z
2022-03-18T17:22:45.000Z
159.796837
159,384
0.877254
[ [ [ "qpath = r'../../' # path to quetzal here\ndata = r'inputs/'\nimport sys\nsys.path.insert(0, qpath)\n\n# import class\nfrom quetzal.io.gtfs_reader import importer", "C:\\Users\\qchasserieau\\AppData\\Local\\Continuum\\Anaconda3\\envs\\quetzal_env_c\\lib\\site-packages\\pyproj\\crs.py:422: FutureWarning: '+init=<authority>:<code>' syntax is deprecated. '<authority>:<code>' is the preferred initialization method.\n projstring = _prepare_from_string(projparams)\n" ], [ "import numpy as np", "_____no_output_____" ] ], [ [ "### Read GTFS", "_____no_output_____" ] ], [ [ "feed = importer.GtfsImporter(path=data + r'bilbao.zip', dist_units='m')\nfeed = feed.clean()\nfeed.describe()", "_____no_output_____" ], [ "feed.validate()", "_____no_output_____" ], [ "feed.map_stops(feed.stops.stop_id)", "_____no_output_____" ], [ "feed = feed.create_shapes()", "_____no_output_____" ], [ "feed.map_trips(\n feed.trips.groupby('route_id').first().trip_id.head(50)\n)", "_____no_output_____" ] ], [ [ "Frequency conversion currently work only for one specific service and date, and on one given time period. \nIt computes the average headway over this time period.", "_____no_output_____" ], [ "### Restrict to one date and merge services", "_____no_output_____" ] ], [ [ "feed = feed.restrict(dates=['20191210'])\nfeed.group_services()", "_____no_output_____" ], [ "feed.describe()", "_____no_output_____" ] ], [ [ "### Build simplified patterns by clustering stops", "_____no_output_____" ] ], [ [ "feed.build_stop_clusters(distance_threshold=300)", "_____no_output_____" ], [ "print('N stops:', len(feed.stops))\nprint('N clusters:', len(feed.stops.cluster_id.unique()))", "N stops: 498\nN clusters: 165\n" ], [ "feed.build_patterns(on='cluster_id')", "_____no_output_____" ], [ "print('N trips:', len(feed.trips))\nprint('N patterns:', len(feed.trips.pattern_id.unique()))\nprint('N routes:', len(feed.trips.route_id.unique()))", "N trips: 3297\nN patterns: 124\nN routes: 82\n" ], [ "feed.describe()", "_____no_output_____" ] ], [ [ "### Convert to frequencies", "_____no_output_____" ] ], [ [ "time_range = ['08:00:00', '10:00:00'] # time format must be HH:MM:SS\nfeed_f = feed.convert_to_frequencies(time_range=time_range)", "100%|██████████████████████████████████████████████████████████████████████████████████| 75/75 [00:00<00:00, 79.03it/s]\n" ], [ "feed_f.describe()", "_____no_output_____" ] ], [ [ "### Build nodes and links", "_____no_output_____" ] ], [ [ "feed_f.build_links_and_nodes()", "_____no_output_____" ], [ "feed_f.nodes.head(1).T", "_____no_output_____" ], [ "feed_f.links.head(1).T", "_____no_output_____" ] ], [ [ "## All in one\nWhile we recommand to build the nodes and links gradually by exploring the GTFS data first, it is also possible to do it in one line.", "_____no_output_____" ] ], [ [ "feed = importer.GtfsImporter(path=data + r'bilbao.zip', dist_units='m')\nfeed.describe()", "_____no_output_____" ], [ "imp = feed.build(\n date='20191210',\n time_range=['08:00:00', '10:00:00'],\n cluster_distance_threshold=300 # by default: None and no clustering\n)", "Restricting to date…\nGrouping services…\nCleaning…\nClustering stops…\nBuilding patterns…\n" ], [ "imp.describe()", "_____no_output_____" ], [ "imp.links.head()", "_____no_output_____" ], [ "imp.nodes.head()", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ] ]
cbfe44726d473bb50cb902febb5a6ea4cc1a02fd
256,563
ipynb
Jupyter Notebook
40_cnn/30_week3/Car detection for Autonomous Driving/Autonomous_driving_application_Car_detection_v3a.ipynb
dwidjaja/50_deep_learning_specialization
56803a68c806c9d27823b811a424a9d0ee90edbd
[ "MIT" ]
null
null
null
40_cnn/30_week3/Car detection for Autonomous Driving/Autonomous_driving_application_Car_detection_v3a.ipynb
dwidjaja/50_deep_learning_specialization
56803a68c806c9d27823b811a424a9d0ee90edbd
[ "MIT" ]
null
null
null
40_cnn/30_week3/Car detection for Autonomous Driving/Autonomous_driving_application_Car_detection_v3a.ipynb
dwidjaja/50_deep_learning_specialization
56803a68c806c9d27823b811a424a9d0ee90edbd
[ "MIT" ]
null
null
null
166.491239
179,682
0.847449
[ [ [ "# Autonomous driving - Car detection\n\nWelcome to your week 3 programming assignment. You will learn about object detection using the very powerful YOLO model. Many of the ideas in this notebook are described in the two YOLO papers: [Redmon et al., 2016](https://arxiv.org/abs/1506.02640) and [Redmon and Farhadi, 2016](https://arxiv.org/abs/1612.08242). \n\n**You will learn to**:\n- Use object detection on a car detection dataset\n- Deal with bounding boxes\n\n", "_____no_output_____" ], [ "## <font color='darkblue'>Updates</font>\n\n#### If you were working on the notebook before this update...\n* The current notebook is version \"3a\".\n* You can find your original work saved in the notebook with the previous version name (\"v3\") \n* To view the file directory, go to the menu \"File->Open\", and this will open a new tab that shows the file directory.\n\n#### List of updates\n* Clarified \"YOLO\" instructions preceding the code. \n* Added details about anchor boxes.\n* Added explanation of how score is calculated.\n* `yolo_filter_boxes`: added additional hints. Clarify syntax for argmax and max.\n* `iou`: clarify instructions for finding the intersection.\n* `iou`: give variable names for all 8 box vertices, for clarity. Adds `width` and `height` variables for clarity.\n* `iou`: add test cases to check handling of non-intersecting boxes, intersection at vertices, or intersection at edges.\n* `yolo_non_max_suppression`: clarify syntax for tf.image.non_max_suppression and keras.gather.\n* \"convert output of the model to usable bounding box tensors\": Provides a link to the definition of `yolo_head`.\n* `predict`: hint on calling sess.run.\n* Spelling, grammar, wording and formatting updates to improve clarity.", "_____no_output_____" ], [ "## Import libraries\nRun the following cell to load the packages and dependencies that you will find useful as you build the object detector!", "_____no_output_____" ] ], [ [ "import argparse\nimport os\nimport matplotlib.pyplot as plt\nfrom matplotlib.pyplot import imshow\nimport scipy.io\nimport scipy.misc\nimport numpy as np\nimport pandas as pd\nimport PIL\nimport tensorflow as tf\nfrom keras import backend as K\nfrom keras.layers import Input, Lambda, Conv2D\nfrom keras.models import load_model, Model\nfrom yolo_utils import read_classes, read_anchors, generate_colors, preprocess_image, draw_boxes, scale_boxes\nfrom yad2k.models.keras_yolo import yolo_head, yolo_boxes_to_corners, preprocess_true_boxes, yolo_loss, yolo_body\n\n%matplotlib inline", "Using TensorFlow backend.\n" ] ], [ [ "**Important Note**: As you can see, we import Keras's backend as K. This means that to use a Keras function in this notebook, you will need to write: `K.function(...)`.", "_____no_output_____" ], [ "## 1 - Problem Statement\n\nYou are working on a self-driving car. As a critical component of this project, you'd like to first build a car detection system. To collect data, you've mounted a camera to the hood (meaning the front) of the car, which takes pictures of the road ahead every few seconds while you drive around. \n\n<center>\n<video width=\"400\" height=\"200\" src=\"nb_images/road_video_compressed2.mp4\" type=\"video/mp4\" controls>\n</video>\n</center>\n\n<caption><center> Pictures taken from a car-mounted camera while driving around Silicon Valley. 
<br> We thank [drive.ai](htps://www.drive.ai/) for providing this dataset.\n</center></caption>\n\nYou've gathered all these images into a folder and have labelled them by drawing bounding boxes around every car you found. Here's an example of what your bounding boxes look like.\n\n<img src=\"nb_images/box_label.png\" style=\"width:500px;height:250;\">\n<caption><center> <u> **Figure 1** </u>: **Definition of a box**<br> </center></caption>\n\nIf you have 80 classes that you want the object detector to recognize, you can represent the class label $c$ either as an integer from 1 to 80, or as an 80-dimensional vector (with 80 numbers) one component of which is 1 and the rest of which are 0. The video lectures had used the latter representation; in this notebook, we will use both representations, depending on which is more convenient for a particular step. \n\nIn this exercise, you will learn how \"You Only Look Once\" (YOLO) performs object detection, and then apply it to car detection. Because the YOLO model is very computationally expensive to train, we will load pre-trained weights for you to use. ", "_____no_output_____" ], [ "## 2 - YOLO", "_____no_output_____" ], [ "\"You Only Look Once\" (YOLO) is a popular algorithm because it achieves high accuracy while also being able to run in real-time. This algorithm \"only looks once\" at the image in the sense that it requires only one forward propagation pass through the network to make predictions. After non-max suppression, it then outputs recognized objects together with the bounding boxes.\n\n### 2.1 - Model details\n\n#### Inputs and outputs\n- The **input** is a batch of images, and each image has the shape (m, 608, 608, 3)\n- The **output** is a list of bounding boxes along with the recognized classes. Each bounding box is represented by 6 numbers $(p_c, b_x, b_y, b_h, b_w, c)$ as explained above. If you expand $c$ into an 80-dimensional vector, each bounding box is then represented by 85 numbers. \n\n#### Anchor Boxes\n* Anchor boxes are chosen by exploring the training data to choose reasonable height/width ratios that represent the different classes. For this assignment, 5 anchor boxes were chosen for you (to cover the 80 classes), and stored in the file './model_data/yolo_anchors.txt'\n* The dimension for anchor boxes is the second to last dimension in the encoding: $(m, n_H,n_W,anchors,classes)$.\n* The YOLO architecture is: IMAGE (m, 608, 608, 3) -> DEEP CNN -> ENCODING (m, 19, 19, 5, 85). \n\n\n#### Encoding\nLet's look in greater detail at what this encoding represents. \n\n<img src=\"nb_images/architecture.png\" style=\"width:700px;height:400;\">\n<caption><center> <u> **Figure 2** </u>: **Encoding architecture for YOLO**<br> </center></caption>\n\nIf the center/midpoint of an object falls into a grid cell, that grid cell is responsible for detecting that object.", "_____no_output_____" ], [ "Since we are using 5 anchor boxes, each of the 19 x19 cells thus encodes information about 5 boxes. Anchor boxes are defined only by their width and height.\n\nFor simplicity, we will flatten the last two last dimensions of the shape (19, 19, 5, 85) encoding. 
So the output of the Deep CNN is (19, 19, 425).\n\n<img src=\"nb_images/flatten.png\" style=\"width:700px;height:400;\">\n<caption><center> <u> **Figure 3** </u>: **Flattening the last two last dimensions**<br> </center></caption>", "_____no_output_____" ], [ "#### Class score\n\nNow, for each box (of each cell) we will compute the following element-wise product and extract a probability that the box contains a certain class. \nThe class score is $score_{c,i} = p_{c} \\times c_{i}$: the probability that there is an object $p_{c}$ times the probability that the object is a certain class $c_{i}$.\n\n<img src=\"nb_images/probability_extraction.png\" style=\"width:700px;height:400;\">\n<caption><center> <u> **Figure 4** </u>: **Find the class detected by each box**<br> </center></caption>\n\n##### Example of figure 4\n* In figure 4, let's say for box 1 (cell 1), the probability that an object exists is $p_{1}=0.60$. So there's a 60% chance that an object exists in box 1 (cell 1). \n* The probability that the object is the class \"category 3 (a car)\" is $c_{3}=0.73$. \n* The score for box 1 and for category \"3\" is $score_{1,3}=0.60 \\times 0.73 = 0.44$. \n* Let's say we calculate the score for all 80 classes in box 1, and find that the score for the car class (class 3) is the maximum. So we'll assign the score 0.44 and class \"3\" to this box \"1\".\n\n#### Visualizing classes\nHere's one way to visualize what YOLO is predicting on an image:\n- For each of the 19x19 grid cells, find the maximum of the probability scores (taking a max across the 80 classes, one maximum for each of the 5 anchor boxes).\n- Color that grid cell according to what object that grid cell considers the most likely.\n\nDoing this results in this picture: \n\n<img src=\"nb_images/proba_map.png\" style=\"width:300px;height:300;\">\n<caption><center> <u> **Figure 5** </u>: Each one of the 19x19 grid cells is colored according to which class has the largest predicted probability in that cell.<br> </center></caption>\n\nNote that this visualization isn't a core part of the YOLO algorithm itself for making predictions; it's just a nice way of visualizing an intermediate result of the algorithm. \n", "_____no_output_____" ], [ "#### Visualizing bounding boxes\nAnother way to visualize YOLO's output is to plot the bounding boxes that it outputs. Doing that results in a visualization like this: \n\n<img src=\"nb_images/anchor_map.png\" style=\"width:200px;height:200;\">\n<caption><center> <u> **Figure 6** </u>: Each cell gives you 5 boxes. In total, the model predicts: 19x19x5 = 1805 boxes just by looking once at the image (one forward pass through the network)! Different colors denote different classes. <br> </center></caption>\n\n#### Non-Max suppression\nIn the figure above, we plotted only boxes for which the model had assigned a high probability, but this is still too many boxes. You'd like to reduce the algorithm's output to a much smaller number of detected objects. \n\nTo do so, you'll use **non-max suppression**. Specifically, you'll carry out these steps: \n- Get rid of boxes with a low score (meaning, the box is not very confident about detecting a class; either due to the low probability of any object, or low probability of this particular class).\n- Select only one box when several boxes overlap with each other and detect the same object.\n\n", "_____no_output_____" ], [ "### 2.2 - Filtering with a threshold on class scores\n\nYou are going to first apply a filter by thresholding. 
You would like to get rid of any box for which the class \"score\" is less than a chosen threshold. \n\nThe model gives you a total of 19x19x5x85 numbers, with each box described by 85 numbers. It is convenient to rearrange the (19,19,5,85) (or (19,19,425)) dimensional tensor into the following variables: \n- `box_confidence`: tensor of shape $(19 \\times 19, 5, 1)$ containing $p_c$ (confidence probability that there's some object) for each of the 5 boxes predicted in each of the 19x19 cells.\n- `boxes`: tensor of shape $(19 \\times 19, 5, 4)$ containing the midpoint and dimensions $(b_x, b_y, b_h, b_w)$ for each of the 5 boxes in each cell.\n- `box_class_probs`: tensor of shape $(19 \\times 19, 5, 80)$ containing the \"class probabilities\" $(c_1, c_2, ... c_{80})$ for each of the 80 classes for each of the 5 boxes per cell.\n\n#### **Exercise**: Implement `yolo_filter_boxes()`.\n1. Compute box scores by doing the elementwise product as described in Figure 4 ($p \\times c$). \nThe following code may help you choose the right operator: \n```python\na = np.random.randn(19*19, 5, 1)\nb = np.random.randn(19*19, 5, 80)\nc = a * b # shape of c will be (19*19, 5, 80)\n```\nThis is an example of **broadcasting** (multiplying vectors of different sizes).\n\n2. For each box, find:\n - the index of the class with the maximum box score\n - the corresponding box score\n \n **Useful references**\n * [Keras argmax](https://keras.io/backend/#argmax)\n * [Keras max](https://keras.io/backend/#max)\n\n **Additional Hints**\n * For the `axis` parameter of `argmax` and `max`, if you want to select the **last** axis, one way to do so is to set `axis=-1`. This is similar to Python array indexing, where you can select the last position of an array using `arrayname[-1]`.\n * Applying `max` normally collapses the axis for which the maximum is applied. `keepdims=False` is the default option, and allows that dimension to be removed. We don't need to keep the last dimension after applying the maximum here.\n * Even though the documentation shows `keras.backend.argmax`, use `keras.argmax`. Similarly, use `keras.max`.\n\n\n3. Create a mask by using a threshold. As a reminder: `([0.9, 0.3, 0.4, 0.5, 0.1] < 0.4)` returns: `[False, True, False, False, True]`. The mask should be True for the boxes you want to keep. \n\n4. Use TensorFlow to apply the mask to `box_class_scores`, `boxes` and `box_classes` to filter out the boxes we don't want. You should be left with just the subset of boxes you want to keep. 
\n\n **Useful reference**:\n * [boolean mask](https://www.tensorflow.org/api_docs/python/tf/boolean_mask) \n\n **Additional Hints**: \n * For the `tf.boolean_mask`, we can keep the default `axis=None`.\n\n**Reminder**: to call a Keras function, you should use `K.function(...)`.", "_____no_output_____" ] ], [ [ "# GRADED FUNCTION: yolo_filter_boxes\n\ndef yolo_filter_boxes(box_confidence, boxes, box_class_probs, threshold = .6):\n \"\"\"Filters YOLO boxes by thresholding on object and class confidence.\n \n Arguments:\n box_confidence -- tensor of shape (19, 19, 5, 1)\n boxes -- tensor of shape (19, 19, 5, 4)\n box_class_probs -- tensor of shape (19, 19, 5, 80)\n threshold -- real value, if [ highest class probability score < threshold], then get rid of the corresponding box\n \n Returns:\n scores -- tensor of shape (None,), containing the class probability score for selected boxes\n boxes -- tensor of shape (None, 4), containing (b_x, b_y, b_h, b_w) coordinates of selected boxes\n classes -- tensor of shape (None,), containing the index of the class detected by the selected boxes\n \n Note: \"None\" is here because you don't know the exact number of selected boxes, as it depends on the threshold. \n For example, the actual output size of scores would be (10,) if there are 10 boxes.\n \"\"\"\n \n # Step 1: Compute box scores\n ### START CODE HERE ### (≈ 1 line)\n box_scores = box_confidence * box_class_probs\n ### END CODE HERE ###\n \n # Step 2: Find the box_classes using the max box_scores, keep track of the corresponding score\n ### START CODE HERE ### (≈ 2 lines)\n box_classes = K.argmax(box_scores, axis=-1)\n box_class_scores = K.max(box_scores, axis=-1, keepdims=False)\n ### END CODE HERE ###\n \n # Step 3: Create a filtering mask based on \"box_class_scores\" by using \"threshold\". 
The mask should have the\n # same dimension as box_class_scores, and be True for the boxes you want to keep (with probability >= threshold)\n ### START CODE HERE ### (≈ 1 line)\n filtering_mask = box_class_scores >= threshold\n ### END CODE HERE ###\n \n # Step 4: Apply the mask to box_class_scores, boxes and box_classes\n ### START CODE HERE ### (≈ 3 lines)\n scores = tf.boolean_mask(box_class_scores, filtering_mask)\n boxes = tf.boolean_mask(boxes, filtering_mask)\n classes = tf.boolean_mask(box_classes, filtering_mask)\n ### END CODE HERE ###\n \n return scores, boxes, classes", "_____no_output_____" ], [ "with tf.Session() as test_a:\n box_confidence = tf.random_normal([19, 19, 5, 1], mean=1, stddev=4, seed = 1)\n boxes = tf.random_normal([19, 19, 5, 4], mean=1, stddev=4, seed = 1)\n box_class_probs = tf.random_normal([19, 19, 5, 80], mean=1, stddev=4, seed = 1)\n scores, boxes, classes = yolo_filter_boxes(box_confidence, boxes, box_class_probs, threshold = 0.5)\n print(\"scores[2] = \" + str(scores[2].eval()))\n print(\"boxes[2] = \" + str(boxes[2].eval()))\n print(\"classes[2] = \" + str(classes[2].eval()))\n print(\"scores.shape = \" + str(scores.shape))\n print(\"boxes.shape = \" + str(boxes.shape))\n print(\"classes.shape = \" + str(classes.shape))", "scores[2] = 10.7506\nboxes[2] = [ 8.42653275 3.27136683 -0.5313437 -4.94137383]\nclasses[2] = 7\nscores.shape = (?,)\nboxes.shape = (?, 4)\nclasses.shape = (?,)\n" ] ], [ [ "**Expected Output**:\n\n<table>\n <tr>\n <td>\n **scores[2]**\n </td>\n <td>\n 10.7506\n </td>\n </tr>\n <tr>\n <td>\n **boxes[2]**\n </td>\n <td>\n [ 8.42653275 3.27136683 -0.5313437 -4.94137383]\n </td>\n </tr>\n\n <tr>\n <td>\n **classes[2]**\n </td>\n <td>\n 7\n </td>\n </tr>\n <tr>\n <td>\n **scores.shape**\n </td>\n <td>\n (?,)\n </td>\n </tr>\n <tr>\n <td>\n **boxes.shape**\n </td>\n <td>\n (?, 4)\n </td>\n </tr>\n\n <tr>\n <td>\n **classes.shape**\n </td>\n <td>\n (?,)\n </td>\n </tr>\n\n</table>", "_____no_output_____" ], [ "**Note** In the test for `yolo_filter_boxes`, we're using random numbers to test the function. In real data, the `box_class_probs` would contain non-zero values between 0 and 1 for the probabilities. The box coordinates in `boxes` would also be chosen so that lengths and heights are non-negative.", "_____no_output_____" ], [ "### 2.3 - Non-max suppression ###\n\nEven after filtering by thresholding over the class scores, you still end up with a lot of overlapping boxes. A second filter for selecting the right boxes is called non-maximum suppression (NMS). ", "_____no_output_____" ], [ "<img src=\"nb_images/non-max-suppression.png\" style=\"width:500px;height:400;\">\n<caption><center> <u> **Figure 7** </u>: In this example, the model has predicted 3 cars, but it's actually 3 predictions of the same car. Running non-max suppression (NMS) will select only the most accurate (highest probability) of the 3 boxes. <br> </center></caption>\n", "_____no_output_____" ], [ "Non-max suppression uses the very important function called **\"Intersection over Union\"**, or IoU.\n<img src=\"nb_images/iou.png\" style=\"width:500px;height:400;\">\n<caption><center> <u> **Figure 8** </u>: Definition of \"Intersection over Union\". <br> </center></caption>\n\n#### **Exercise**: Implement iou(). Some hints:\n- In this code, we use the convention that (0,0) is the top-left corner of an image, (1,0) is the upper-right corner, and (1,1) is the lower-right corner. In other words, the (0,0) origin starts at the top left corner of the image. 
As x increases, we move to the right. As y increases, we move down.\n- For this exercise, we define a box using its two corners: upper left $(x_1, y_1)$ and lower right $(x_2,y_2)$, instead of using the midpoint, height and width. (This makes it a bit easier to calculate the intersection).\n- To calculate the area of a rectangle, multiply its height $(y_2 - y_1)$ by its width $(x_2 - x_1)$. (Since $(x_1,y_1)$ is the top left and $x_2,y_2$ are the bottom right, these differences should be non-negative.\n- To find the **intersection** of the two boxes $(xi_{1}, yi_{1}, xi_{2}, yi_{2})$: \n - Feel free to draw some examples on paper to clarify this conceptually.\n - The top left corner of the intersection $(xi_{1}, yi_{1})$ is found by comparing the top left corners $(x_1, y_1)$ of the two boxes and finding a vertex that has an x-coordinate that is closer to the right, and y-coordinate that is closer to the bottom.\n - The bottom right corner of the intersection $(xi_{2}, yi_{2})$ is found by comparing the bottom right corners $(x_2,y_2)$ of the two boxes and finding a vertex whose x-coordinate is closer to the left, and the y-coordinate that is closer to the top.\n - The two boxes **may have no intersection**. You can detect this if the intersection coordinates you calculate end up being the top right and/or bottom left corners of an intersection box. Another way to think of this is if you calculate the height $(y_2 - y_1)$ or width $(x_2 - x_1)$ and find that at least one of these lengths is negative, then there is no intersection (intersection area is zero). \n - The two boxes may intersect at the **edges or vertices**, in which case the intersection area is still zero. This happens when either the height or width (or both) of the calculated intersection is zero.\n\n\n**Additional Hints**\n\n- `xi1` = **max**imum of the x1 coordinates of the two boxes\n- `yi1` = **max**imum of the y1 coordinates of the two boxes\n- `xi2` = **min**imum of the x2 coordinates of the two boxes\n- `yi2` = **min**imum of the y2 coordinates of the two boxes\n- `inter_area` = You can use `max(height, 0)` and `max(width, 0)`\n", "_____no_output_____" ] ], [ [ "# GRADED FUNCTION: iou\n\ndef iou(box1, box2):\n \"\"\"Implement the intersection over union (IoU) between box1 and box2\n    \n Arguments:\n box1 -- first box, list object with coordinates (box1_x1, box1_y1, box1_x2, box_1_y2)\n    box2 -- second box, list object with coordinates (box2_x1, box2_y1, box2_x2, box2_y2)\n    \"\"\"\n\n # Assign variable names to coordinates for clarity\n (box1_x1, box1_y1, box1_x2, box1_y2) = box1\n (box2_x1, box2_y1, box2_x2, box2_y2) = box2\n \n # Calculate the (yi1, xi1, yi2, xi2) coordinates of the intersection of box1 and box2. 
Calculate its Area.\n ### START CODE HERE ### (≈ 7 lines)\n xi1 = max(box1[0], box2[0])\n yi1 = max(box1[1], box2[1])\n xi2 = min(box1[2], box2[2])\n yi2 = min(box1[3], box2[3])\n inter_width = max(xi2-xi1, 0)\n inter_height = max(yi2-yi1, 0)\n inter_area = inter_width * inter_height\n ### END CODE HERE ###    \n\n # Calculate the Union area by using Formula: Union(A,B) = A + B - Inter(A,B)\n ### START CODE HERE ### (≈ 3 lines)\n box1_area = (box1[2] - box1[0]) * (box1[3] - box1[1])\n box2_area = (box2[2] - box2[0]) * (box2[3] - box2[1])\n union_area = box1_area + box2_area - inter_area\n ### END CODE HERE ###\n \n # compute the IoU\n ### START CODE HERE ### (≈ 1 line)\n iou = inter_area/union_area\n ### END CODE HERE ###\n \n return iou", "_____no_output_____" ], [ "## Test case 1: boxes intersect\nbox1 = (2, 1, 4, 3)\nbox2 = (1, 2, 3, 4) \nprint(\"iou for intersecting boxes = \" + str(iou(box1, box2)))\n\n## Test case 2: boxes do not intersect\nbox1 = (1,2,3,4)\nbox2 = (5,6,7,8)\nprint(\"iou for non-intersecting boxes = \" + str(iou(box1,box2)))\n\n## Test case 3: boxes intersect at vertices only\nbox1 = (1,1,2,2)\nbox2 = (2,2,3,3)\nprint(\"iou for boxes that only touch at vertices = \" + str(iou(box1,box2)))\n\n## Test case 4: boxes intersect at edge only\nbox1 = (1,1,3,3)\nbox2 = (2,3,3,4)\nprint(\"iou for boxes that only touch at edges = \" + str(iou(box1,box2)))", "iou for intersecting boxes = 0.14285714285714285\niou for non-intersecting boxes = 0.0\niou for boxes that only touch at vertices = 0.0\niou for boxes that only touch at edges = 0.0\n" ] ], [ [ "**Expected Output**:\n\n```\niou for intersecting boxes = 0.14285714285714285\niou for non-intersecting boxes = 0.0\niou for boxes that only touch at vertices = 0.0\niou for boxes that only touch at edges = 0.0\n```", "_____no_output_____" ], [ "#### YOLO non-max suppression\n\nYou are now ready to implement non-max suppression. The key steps are: \n1. Select the box that has the highest score.\n2. Compute the overlap of this box with all other boxes, and remove boxes that overlap significantly (iou >= `iou_threshold`).\n3. Go back to step 1 and iterate until there are no more boxes with a lower score than the currently selected box.\n\nThis will remove all boxes that have a large overlap with the selected boxes. Only the \"best\" boxes remain.\n\n**Exercise**: Implement yolo_non_max_suppression() using TensorFlow. TensorFlow has two built-in functions that are used to implement non-max suppression (so you don't actually need to use your `iou()` implementation):\n\n** Reference documentation ** \n\n- [tf.image.non_max_suppression()](https://www.tensorflow.org/api_docs/python/tf/image/non_max_suppression)\n```\ntf.image.non_max_suppression(\n boxes,\n scores,\n max_output_size,\n iou_threshold=0.5,\n name=None\n)\n```\nNote that in the version of tensorflow used here, there is no parameter `score_threshold` (it's shown in the documentation for the latest version) so trying to set this value will result in an error message: *got an unexpected keyword argument 'score_threshold.*\n\n- [K.gather()](https://www.tensorflow.org/api_docs/python/tf/keras/backend/gather) \nEven though the documentation shows `tf.keras.backend.gather()`, you can use `keras.gather()`. 
\n```\nkeras.gather(\n reference,\n indices\n)\n```", "_____no_output_____" ] ], [ [ "# GRADED FUNCTION: yolo_non_max_suppression\n\ndef yolo_non_max_suppression(scores, boxes, classes, max_boxes = 10, iou_threshold = 0.5):\n \"\"\"\n Applies Non-max suppression (NMS) to set of boxes\n \n Arguments:\n scores -- tensor of shape (None,), output of yolo_filter_boxes()\n boxes -- tensor of shape (None, 4), output of yolo_filter_boxes() that have been scaled to the image size (see later)\n classes -- tensor of shape (None,), output of yolo_filter_boxes()\n max_boxes -- integer, maximum number of predicted boxes you'd like\n iou_threshold -- real value, \"intersection over union\" threshold used for NMS filtering\n \n Returns:\n scores -- tensor of shape (, None), predicted score for each box\n boxes -- tensor of shape (4, None), predicted box coordinates\n classes -- tensor of shape (, None), predicted class for each box\n \n Note: The \"None\" dimension of the output tensors has obviously to be less than max_boxes. Note also that this\n function will transpose the shapes of scores, boxes, classes. This is made for convenience.\n \"\"\"\n \n max_boxes_tensor = K.variable(max_boxes, dtype='int32') # tensor to be used in tf.image.non_max_suppression()\n K.get_session().run(tf.variables_initializer([max_boxes_tensor])) # initialize variable max_boxes_tensor\n \n # Use tf.image.non_max_suppression() to get the list of indices corresponding to boxes you keep\n ### START CODE HERE ### (≈ 1 line)\n nms_indices = tf.image.non_max_suppression(boxes = boxes, scores = scores, max_output_size = max_boxes, iou_threshold = iou_threshold)\n ### END CODE HERE ###\n \n # Use K.gather() to select only nms_indices from scores, boxes and classes\n ### START CODE HERE ### (≈ 3 lines)\n scores = K.gather(scores, nms_indices)\n boxes = K.gather(boxes, nms_indices)\n classes = K.gather(classes, nms_indices)\n ### END CODE HERE ###\n \n return scores, boxes, classes", "_____no_output_____" ], [ "with tf.Session() as test_b:\n scores = tf.random_normal([54,], mean=1, stddev=4, seed = 1)\n boxes = tf.random_normal([54, 4], mean=1, stddev=4, seed = 1)\n classes = tf.random_normal([54,], mean=1, stddev=4, seed = 1)\n scores, boxes, classes = yolo_non_max_suppression(scores, boxes, classes)\n print(\"scores[2] = \" + str(scores[2].eval()))\n print(\"boxes[2] = \" + str(boxes[2].eval()))\n print(\"classes[2] = \" + str(classes[2].eval()))\n print(\"scores.shape = \" + str(scores.eval().shape))\n print(\"boxes.shape = \" + str(boxes.eval().shape))\n print(\"classes.shape = \" + str(classes.eval().shape))", "scores[2] = 6.9384\nboxes[2] = [-5.299932 3.13798141 4.45036697 0.95942086]\nclasses[2] = -2.24527\nscores.shape = (10,)\nboxes.shape = (10, 4)\nclasses.shape = (10,)\n" ] ], [ [ "**Expected Output**:\n\n<table>\n <tr>\n <td>\n **scores[2]**\n </td>\n <td>\n 6.9384\n </td>\n </tr>\n <tr>\n <td>\n **boxes[2]**\n </td>\n <td>\n [-5.299932 3.13798141 4.45036697 0.95942086]\n </td>\n </tr>\n\n <tr>\n <td>\n **classes[2]**\n </td>\n <td>\n -2.24527\n </td>\n </tr>\n <tr>\n <td>\n **scores.shape**\n </td>\n <td>\n (10,)\n </td>\n </tr>\n <tr>\n <td>\n **boxes.shape**\n </td>\n <td>\n (10, 4)\n </td>\n </tr>\n\n <tr>\n <td>\n **classes.shape**\n </td>\n <td>\n (10,)\n </td>\n </tr>\n\n</table>", "_____no_output_____" ], [ "### 2.4 Wrapping up the filtering\n\nIt's time to implement a function taking the output of the deep CNN (the 19x19x5x85 dimensional encoding) and filtering through all the boxes using the functions you've just 
implemented. \n\n**Exercise**: Implement `yolo_eval()` which takes the output of the YOLO encoding and filters the boxes using score threshold and NMS. There's just one last implementational detail you have to know. There're a few ways of representing boxes, such as via their corners or via their midpoint and height/width. YOLO converts between a few such formats at different times, using the following functions (which we have provided): \n\n```python\nboxes = yolo_boxes_to_corners(box_xy, box_wh) \n```\nwhich converts the yolo box coordinates (x,y,w,h) to box corners' coordinates (x1, y1, x2, y2) to fit the input of `yolo_filter_boxes`\n```python\nboxes = scale_boxes(boxes, image_shape)\n```\nYOLO's network was trained to run on 608x608 images. If you are testing this data on a different size image--for example, the car detection dataset had 720x1280 images--this step rescales the boxes so that they can be plotted on top of the original 720x1280 image. \n\nDon't worry about these two functions; we'll show you where they need to be called. ", "_____no_output_____" ] ], [ [ "# GRADED FUNCTION: yolo_eval\n\ndef yolo_eval(yolo_outputs, image_shape = (720., 1280.), max_boxes=10, score_threshold=.6, iou_threshold=.5):\n \"\"\"\n Converts the output of YOLO encoding (a lot of boxes) to your predicted boxes along with their scores, box coordinates and classes.\n \n Arguments:\n yolo_outputs -- output of the encoding model (for image_shape of (608, 608, 3)), contains 4 tensors:\n box_confidence: tensor of shape (None, 19, 19, 5, 1)\n box_xy: tensor of shape (None, 19, 19, 5, 2)\n box_wh: tensor of shape (None, 19, 19, 5, 2)\n box_class_probs: tensor of shape (None, 19, 19, 5, 80)\n image_shape -- tensor of shape (2,) containing the input shape, in this notebook we use (608., 608.) 
(has to be float32 dtype)\n max_boxes -- integer, maximum number of predicted boxes you'd like\n score_threshold -- real value, if [ highest class probability score < threshold], then get rid of the corresponding box\n iou_threshold -- real value, \"intersection over union\" threshold used for NMS filtering\n \n Returns:\n scores -- tensor of shape (None, ), predicted score for each box\n boxes -- tensor of shape (None, 4), predicted box coordinates\n classes -- tensor of shape (None,), predicted class for each box\n \"\"\"\n \n ### START CODE HERE ### \n \n # Retrieve outputs of the YOLO model (≈1 line)\n box_confidence, box_xy, box_wh, box_class_probs = yolo_outputs\n\n # Convert boxes to be ready for filtering functions (convert boxes box_xy and box_wh to corner coordinates)\n boxes = yolo_boxes_to_corners(box_xy, box_wh)\n\n # Use one of the functions you've implemented to perform Score-filtering with a threshold of score_threshold (≈1 line)\n scores, boxes, classes = yolo_filter_boxes(box_confidence, boxes, box_class_probs, threshold = 0.5)\n \n # Scale boxes back to original image shape.\n boxes = scale_boxes(boxes, image_shape)\n\n # Use one of the functions you've implemented to perform Non-max suppression with \n # maximum number of boxes set to max_boxes and a threshold of iou_threshold (≈1 line)\n scores, boxes, classes = yolo_non_max_suppression(scores, boxes, classes)\n \n ### END CODE HERE ###\n \n return scores, boxes, classes", "_____no_output_____" ], [ "with tf.Session() as test_b:\n yolo_outputs = (tf.random_normal([19, 19, 5, 1], mean=1, stddev=4, seed = 1),\n tf.random_normal([19, 19, 5, 2], mean=1, stddev=4, seed = 1),\n tf.random_normal([19, 19, 5, 2], mean=1, stddev=4, seed = 1),\n tf.random_normal([19, 19, 5, 80], mean=1, stddev=4, seed = 1))\n scores, boxes, classes = yolo_eval(yolo_outputs)\n print(\"scores[2] = \" + str(scores[2].eval()))\n print(\"boxes[2] = \" + str(boxes[2].eval()))\n print(\"classes[2] = \" + str(classes[2].eval()))\n print(\"scores.shape = \" + str(scores.eval().shape))\n print(\"boxes.shape = \" + str(boxes.eval().shape))\n print(\"classes.shape = \" + str(classes.eval().shape))", "scores[2] = 138.791\nboxes[2] = [ 1292.32971191 -278.52166748 3876.98925781 -835.56494141]\nclasses[2] = 54\nscores.shape = (10,)\nboxes.shape = (10, 4)\nclasses.shape = (10,)\n" ] ], [ [ "**Expected Output**:\n\n<table>\n <tr>\n <td>\n **scores[2]**\n </td>\n <td>\n 138.791\n </td>\n </tr>\n <tr>\n <td>\n **boxes[2]**\n </td>\n <td>\n [ 1292.32971191 -278.52166748 3876.98925781 -835.56494141]\n </td>\n </tr>\n\n <tr>\n <td>\n **classes[2]**\n </td>\n <td>\n 54\n </td>\n </tr>\n <tr>\n <td>\n **scores.shape**\n </td>\n <td>\n (10,)\n </td>\n </tr>\n <tr>\n <td>\n **boxes.shape**\n </td>\n <td>\n (10, 4)\n </td>\n </tr>\n\n <tr>\n <td>\n **classes.shape**\n </td>\n <td>\n (10,)\n </td>\n </tr>\n\n</table>", "_____no_output_____" ], [ "## Summary for YOLO:\n- Input image (608, 608, 3)\n- The input image goes through a CNN, resulting in a (19,19,5,85) dimensional output. \n- After flattening the last two dimensions, the output is a volume of shape (19, 19, 425):\n - Each cell in a 19x19 grid over the input image gives 425 numbers. \n - 425 = 5 x 85 because each cell contains predictions for 5 boxes, corresponding to 5 anchor boxes, as seen in lecture. 
\n - 85 = 5 + 80 where 5 is because $(p_c, b_x, b_y, b_h, b_w)$ has 5 numbers, and 80 is the number of classes we'd like to detect\n- You then select only few boxes based on:\n - Score-thresholding: throw away boxes that have detected a class with a score less than the threshold\n - Non-max suppression: Compute the Intersection over Union and avoid selecting overlapping boxes\n- This gives you YOLO's final output. ", "_____no_output_____" ], [ "## 3 - Test YOLO pre-trained model on images", "_____no_output_____" ], [ "In this part, you are going to use a pre-trained model and test it on the car detection dataset. We'll need a session to execute the computation graph and evaluate the tensors.", "_____no_output_____" ] ], [ [ "sess = K.get_session()", "_____no_output_____" ] ], [ [ "### 3.1 - Defining classes, anchors and image shape.\n\n* Recall that we are trying to detect 80 classes, and are using 5 anchor boxes. \n* We have gathered the information on the 80 classes and 5 boxes in two files \"coco_classes.txt\" and \"yolo_anchors.txt\". \n* We'll read class names and anchors from text files.\n* The car detection dataset has 720x1280 images, which we've pre-processed into 608x608 images. ", "_____no_output_____" ] ], [ [ "class_names = read_classes(\"model_data/coco_classes.txt\")\nanchors = read_anchors(\"model_data/yolo_anchors.txt\")\nimage_shape = (720., 1280.) ", "_____no_output_____" ] ], [ [ "### 3.2 - Loading a pre-trained model\n\n* Training a YOLO model takes a very long time and requires a fairly large dataset of labelled bounding boxes for a large range of target classes. \n* You are going to load an existing pre-trained Keras YOLO model stored in \"yolo.h5\". \n* These weights come from the official YOLO website, and were converted using a function written by Allan Zelener. References are at the end of this notebook. Technically, these are the parameters from the \"YOLOv2\" model, but we will simply refer to it as \"YOLO\" in this notebook.\n\nRun the cell below to load the model from this file.", "_____no_output_____" ] ], [ [ "yolo_model = load_model(\"model_data/yolo.h5\")", "/opt/conda/lib/python3.6/site-packages/keras/models.py:251: UserWarning: No training configuration found in save file: the model was *not* compiled. Compile it manually.\n warnings.warn('No training configuration found in save file: '\n" ] ], [ [ "This loads the weights of a trained YOLO model. 
Here's a summary of the layers your model contains.", "_____no_output_____" ] ], [ [ "yolo_model.summary()", "____________________________________________________________________________________________________\nLayer (type) Output Shape Param # Connected to \n====================================================================================================\ninput_1 (InputLayer) (None, 608, 608, 3) 0 \n____________________________________________________________________________________________________\nconv2d_1 (Conv2D) (None, 608, 608, 32) 864 input_1[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_1 (BatchNorm (None, 608, 608, 32) 128 conv2d_1[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_1 (LeakyReLU) (None, 608, 608, 32) 0 batch_normalization_1[0][0] \n____________________________________________________________________________________________________\nmax_pooling2d_1 (MaxPooling2D) (None, 304, 304, 32) 0 leaky_re_lu_1[0][0] \n____________________________________________________________________________________________________\nconv2d_2 (Conv2D) (None, 304, 304, 64) 18432 max_pooling2d_1[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_2 (BatchNorm (None, 304, 304, 64) 256 conv2d_2[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_2 (LeakyReLU) (None, 304, 304, 64) 0 batch_normalization_2[0][0] \n____________________________________________________________________________________________________\nmax_pooling2d_2 (MaxPooling2D) (None, 152, 152, 64) 0 leaky_re_lu_2[0][0] \n____________________________________________________________________________________________________\nconv2d_3 (Conv2D) (None, 152, 152, 128) 73728 max_pooling2d_2[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_3 (BatchNorm (None, 152, 152, 128) 512 conv2d_3[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_3 (LeakyReLU) (None, 152, 152, 128) 0 batch_normalization_3[0][0] \n____________________________________________________________________________________________________\nconv2d_4 (Conv2D) (None, 152, 152, 64) 8192 leaky_re_lu_3[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_4 (BatchNorm (None, 152, 152, 64) 256 conv2d_4[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_4 (LeakyReLU) (None, 152, 152, 64) 0 batch_normalization_4[0][0] \n____________________________________________________________________________________________________\nconv2d_5 (Conv2D) (None, 152, 152, 128) 73728 leaky_re_lu_4[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_5 (BatchNorm (None, 152, 152, 128) 512 conv2d_5[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_5 (LeakyReLU) (None, 152, 152, 128) 0 batch_normalization_5[0][0] \n____________________________________________________________________________________________________\nmax_pooling2d_3 (MaxPooling2D) (None, 76, 76, 128) 0 
leaky_re_lu_5[0][0] \n____________________________________________________________________________________________________\nconv2d_6 (Conv2D) (None, 76, 76, 256) 294912 max_pooling2d_3[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_6 (BatchNorm (None, 76, 76, 256) 1024 conv2d_6[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_6 (LeakyReLU) (None, 76, 76, 256) 0 batch_normalization_6[0][0] \n____________________________________________________________________________________________________\nconv2d_7 (Conv2D) (None, 76, 76, 128) 32768 leaky_re_lu_6[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_7 (BatchNorm (None, 76, 76, 128) 512 conv2d_7[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_7 (LeakyReLU) (None, 76, 76, 128) 0 batch_normalization_7[0][0] \n____________________________________________________________________________________________________\nconv2d_8 (Conv2D) (None, 76, 76, 256) 294912 leaky_re_lu_7[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_8 (BatchNorm (None, 76, 76, 256) 1024 conv2d_8[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_8 (LeakyReLU) (None, 76, 76, 256) 0 batch_normalization_8[0][0] \n____________________________________________________________________________________________________\nmax_pooling2d_4 (MaxPooling2D) (None, 38, 38, 256) 0 leaky_re_lu_8[0][0] \n____________________________________________________________________________________________________\nconv2d_9 (Conv2D) (None, 38, 38, 512) 1179648 max_pooling2d_4[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_9 (BatchNorm (None, 38, 38, 512) 2048 conv2d_9[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_9 (LeakyReLU) (None, 38, 38, 512) 0 batch_normalization_9[0][0] \n____________________________________________________________________________________________________\nconv2d_10 (Conv2D) (None, 38, 38, 256) 131072 leaky_re_lu_9[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_10 (BatchNor (None, 38, 38, 256) 1024 conv2d_10[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_10 (LeakyReLU) (None, 38, 38, 256) 0 batch_normalization_10[0][0] \n____________________________________________________________________________________________________\nconv2d_11 (Conv2D) (None, 38, 38, 512) 1179648 leaky_re_lu_10[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_11 (BatchNor (None, 38, 38, 512) 2048 conv2d_11[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_11 (LeakyReLU) (None, 38, 38, 512) 0 batch_normalization_11[0][0] \n____________________________________________________________________________________________________\nconv2d_12 (Conv2D) (None, 38, 38, 256) 131072 leaky_re_lu_11[0][0] 
\n____________________________________________________________________________________________________\nbatch_normalization_12 (BatchNor (None, 38, 38, 256) 1024 conv2d_12[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_12 (LeakyReLU) (None, 38, 38, 256) 0 batch_normalization_12[0][0] \n____________________________________________________________________________________________________\nconv2d_13 (Conv2D) (None, 38, 38, 512) 1179648 leaky_re_lu_12[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_13 (BatchNor (None, 38, 38, 512) 2048 conv2d_13[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_13 (LeakyReLU) (None, 38, 38, 512) 0 batch_normalization_13[0][0] \n____________________________________________________________________________________________________\nmax_pooling2d_5 (MaxPooling2D) (None, 19, 19, 512) 0 leaky_re_lu_13[0][0] \n____________________________________________________________________________________________________\nconv2d_14 (Conv2D) (None, 19, 19, 1024) 4718592 max_pooling2d_5[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_14 (BatchNor (None, 19, 19, 1024) 4096 conv2d_14[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_14 (LeakyReLU) (None, 19, 19, 1024) 0 batch_normalization_14[0][0] \n____________________________________________________________________________________________________\nconv2d_15 (Conv2D) (None, 19, 19, 512) 524288 leaky_re_lu_14[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_15 (BatchNor (None, 19, 19, 512) 2048 conv2d_15[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_15 (LeakyReLU) (None, 19, 19, 512) 0 batch_normalization_15[0][0] \n____________________________________________________________________________________________________\nconv2d_16 (Conv2D) (None, 19, 19, 1024) 4718592 leaky_re_lu_15[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_16 (BatchNor (None, 19, 19, 1024) 4096 conv2d_16[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_16 (LeakyReLU) (None, 19, 19, 1024) 0 batch_normalization_16[0][0] \n____________________________________________________________________________________________________\nconv2d_17 (Conv2D) (None, 19, 19, 512) 524288 leaky_re_lu_16[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_17 (BatchNor (None, 19, 19, 512) 2048 conv2d_17[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_17 (LeakyReLU) (None, 19, 19, 512) 0 batch_normalization_17[0][0] \n____________________________________________________________________________________________________\nconv2d_18 (Conv2D) (None, 19, 19, 1024) 4718592 leaky_re_lu_17[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_18 (BatchNor (None, 19, 19, 1024) 4096 conv2d_18[0][0] 
\n____________________________________________________________________________________________________\nleaky_re_lu_18 (LeakyReLU) (None, 19, 19, 1024) 0 batch_normalization_18[0][0] \n____________________________________________________________________________________________________\nconv2d_19 (Conv2D) (None, 19, 19, 1024) 9437184 leaky_re_lu_18[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_19 (BatchNor (None, 19, 19, 1024) 4096 conv2d_19[0][0] \n____________________________________________________________________________________________________\nconv2d_21 (Conv2D) (None, 38, 38, 64) 32768 leaky_re_lu_13[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_19 (LeakyReLU) (None, 19, 19, 1024) 0 batch_normalization_19[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_21 (BatchNor (None, 38, 38, 64) 256 conv2d_21[0][0] \n____________________________________________________________________________________________________\nconv2d_20 (Conv2D) (None, 19, 19, 1024) 9437184 leaky_re_lu_19[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_21 (LeakyReLU) (None, 38, 38, 64) 0 batch_normalization_21[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_20 (BatchNor (None, 19, 19, 1024) 4096 conv2d_20[0][0] \n____________________________________________________________________________________________________\nspace_to_depth_x2 (Lambda) (None, 19, 19, 256) 0 leaky_re_lu_21[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_20 (LeakyReLU) (None, 19, 19, 1024) 0 batch_normalization_20[0][0] \n____________________________________________________________________________________________________\nconcatenate_1 (Concatenate) (None, 19, 19, 1280) 0 space_to_depth_x2[0][0] \n leaky_re_lu_20[0][0] \n____________________________________________________________________________________________________\nconv2d_22 (Conv2D) (None, 19, 19, 1024) 11796480 concatenate_1[0][0] \n____________________________________________________________________________________________________\nbatch_normalization_22 (BatchNor (None, 19, 19, 1024) 4096 conv2d_22[0][0] \n____________________________________________________________________________________________________\nleaky_re_lu_22 (LeakyReLU) (None, 19, 19, 1024) 0 batch_normalization_22[0][0] \n____________________________________________________________________________________________________\nconv2d_23 (Conv2D) (None, 19, 19, 425) 435625 leaky_re_lu_22[0][0] \n====================================================================================================\nTotal params: 50,983,561\nTrainable params: 50,962,889\nNon-trainable params: 20,672\n____________________________________________________________________________________________________\n" ] ], [ [ "**Note**: On some computers, you may see a warning message from Keras. 
Don't worry about it if you do--it is fine.\n\n**Reminder**: this model converts a preprocessed batch of input images (shape: (m, 608, 608, 3)) into a tensor of shape (m, 19, 19, 5, 85) as explained in Figure (2).", "_____no_output_____" ], [ "### 3.3 - Convert output of the model to usable bounding box tensors\n\nThe output of `yolo_model` is a (m, 19, 19, 5, 85) tensor that needs to pass through non-trivial processing and conversion. The following cell does that for you.\n\nIf you are curious about how `yolo_head` is implemented, you can find the function definition in the file ['keras_yolo.py'](https://github.com/allanzelener/YAD2K/blob/master/yad2k/models/keras_yolo.py). The file is located in your workspace in this path 'yad2k/models/keras_yolo.py'.", "_____no_output_____" ] ], [ [ "yolo_outputs = yolo_head(yolo_model.output, anchors, len(class_names))", "_____no_output_____" ] ], [ [ "You added `yolo_outputs` to your graph. This set of 4 tensors is ready to be used as input by your `yolo_eval` function.", "_____no_output_____" ], [ "### 3.4 - Filtering boxes\n\n`yolo_outputs` gave you all the predicted boxes of `yolo_model` in the correct format. You're now ready to perform filtering and select only the best boxes. Let's now call `yolo_eval`, which you had previously implemented, to do this. ", "_____no_output_____" ] ], [ [ "scores, boxes, classes = yolo_eval(yolo_outputs, image_shape)", "_____no_output_____" ] ], [ [ "### 3.5 - Run the graph on an image\n\nLet the fun begin. You have created a graph that can be summarized as follows:\n\n1. <font color='purple'> yolo_model.input </font> is given to `yolo_model`. The model is used to compute the output <font color='purple'> yolo_model.output </font>\n2. <font color='purple'> yolo_model.output </font> is processed by `yolo_head`. It gives you <font color='purple'> yolo_outputs </font>\n3. <font color='purple'> yolo_outputs </font> goes through a filtering function, `yolo_eval`. It outputs your predictions: <font color='purple'> scores, boxes, classes </font>\n\n**Exercise**: Implement predict() which runs the graph to test YOLO on an image.\nYou will need to run a TensorFlow session, to have it compute `scores, boxes, classes`.\n\nThe code below also uses the following function:\n```python\nimage, image_data = preprocess_image(\"images/\" + image_file, model_image_size = (608, 608))\n```\nwhich outputs:\n- image: a python (PIL) representation of your image used for drawing boxes. You won't need to use it.\n- image_data: a numpy-array representing the image. This will be the input to the CNN.\n\n**Important note**: when a model uses BatchNorm (as is the case in YOLO), you will need to pass an additional placeholder in the feed_dict {K.learning_phase(): 0}.\n\n#### Hint: Using the TensorFlow Session object\n* Recall that above, we called `K.get_Session()` and saved the Session object in `sess`.\n* To evaluate a list of tensors, we call `sess.run()` like this:\n```\nsess.run(fetches=[tensor1,tensor2,tensor3],\n feed_dict={yolo_model.input: the_input_variable,\n K.learning_phase():0\n }\n```\n* Notice that the variables `scores, boxes, classes` are not passed into the `predict` function, but these are global variables that you will use within the `predict` function.", "_____no_output_____" ] ], [ [ "def predict(sess, image_file):\n \"\"\"\n Runs the graph stored in \"sess\" to predict boxes for \"image_file\". 
Prints and plots the predictions.\n \n Arguments:\n sess -- your tensorflow/Keras session containing the YOLO graph\n image_file -- name of an image stored in the \"images\" folder.\n \n Returns:\n out_scores -- tensor of shape (None, ), scores of the predicted boxes\n out_boxes -- tensor of shape (None, 4), coordinates of the predicted boxes\n out_classes -- tensor of shape (None, ), class index of the predicted boxes\n \n Note: \"None\" actually represents the number of predicted boxes, it varies between 0 and max_boxes. \n \"\"\"\n\n # Preprocess your image\n image, image_data = preprocess_image(\"images/\" + image_file, model_image_size = (608, 608))\n\n # Run the session with the correct tensors and choose the correct placeholders in the feed_dict.\n # You'll need to use feed_dict={yolo_model.input: ... , K.learning_phase(): 0})\n ### START CODE HERE ### (≈ 1 line)\n out_scores, out_boxes, out_classes = sess.run([scores, boxes, classes], feed_dict={yolo_model.input: image_data, K.learning_phase(): 0})\n ### END CODE HERE ###\n\n # Print predictions info\n print('Found {} boxes for {}'.format(len(out_boxes), image_file))\n # Generate colors for drawing bounding boxes.\n colors = generate_colors(class_names)\n # Draw bounding boxes on the image file\n draw_boxes(image, out_scores, out_boxes, out_classes, class_names, colors)\n # Save the predicted bounding box on the image\n image.save(os.path.join(\"out\", image_file), quality=90)\n # Display the results in the notebook\n output_image = scipy.misc.imread(os.path.join(\"out\", image_file))\n imshow(output_image)\n \n return out_scores, out_boxes, out_classes", "_____no_output_____" ] ], [ [ "Run the following cell on the \"test.jpg\" image to verify that your function is correct.", "_____no_output_____" ] ], [ [ "out_scores, out_boxes, out_classes = predict(sess, \"test.jpg\")", "Found 7 boxes for test.jpg\ncar 0.60 (925, 285) (1045, 374)\ncar 0.66 (706, 279) (786, 350)\nbus 0.67 (5, 266) (220, 407)\ncar 0.70 (947, 324) (1280, 705)\ncar 0.74 (159, 303) (346, 440)\ncar 0.80 (761, 282) (942, 412)\ncar 0.89 (367, 300) (745, 648)\n" ] ], [ [ "**Expected Output**:\n\n<table>\n <tr>\n <td>\n **Found 7 boxes for test.jpg**\n </td>\n </tr>\n <tr>\n <td>\n **car**\n </td>\n <td>\n 0.60 (925, 285) (1045, 374)\n </td>\n </tr>\n <tr>\n <td>\n **car**\n </td>\n <td>\n 0.66 (706, 279) (786, 350)\n </td>\n </tr>\n <tr>\n <td>\n **bus**\n </td>\n <td>\n 0.67 (5, 266) (220, 407)\n </td>\n </tr>\n <tr>\n <td>\n **car**\n </td>\n <td>\n 0.70 (947, 324) (1280, 705)\n </td>\n </tr>\n <tr>\n <td>\n **car**\n </td>\n <td>\n 0.74 (159, 303) (346, 440)\n </td>\n </tr>\n <tr>\n <td>\n **car**\n </td>\n <td>\n 0.80 (761, 282) (942, 412)\n </td>\n </tr>\n <tr>\n <td>\n **car**\n </td>\n <td>\n 0.89 (367, 300) (745, 648)\n </td>\n </tr>\n</table>", "_____no_output_____" ], [ "The model you've just run is actually able to detect 80 different classes listed in \"coco_classes.txt\". To test the model on your own images:\n 1. Click on \"File\" in the upper bar of this notebook, then click \"Open\" to go on your Coursera Hub.\n 2. Add your image to this Jupyter Notebook's directory, in the \"images\" folder\n 3. Write your image's name in the cell above code\n 4. Run the code and see the output of the algorithm!\n\nIf you were to run your session in a for loop over all your images. 
Here's what you would get:\n\n<center>\n<video width=\"400\" height=\"200\" src=\"nb_images/pred_video_compressed2.mp4\" type=\"video/mp4\" controls>\n</video>\n</center>\n\n<caption><center> Predictions of the YOLO model on pictures taken from a camera while driving around the Silicon Valley <br> Thanks [drive.ai](https://www.drive.ai/) for providing this dataset! </center></caption>", "_____no_output_____" ], [ "\n## <font color='darkblue'>What you should remember:\n \n- YOLO is a state-of-the-art object detection model that is fast and accurate\n- It runs an input image through a CNN which outputs a 19x19x5x85 dimensional volume. \n- The encoding can be seen as a grid where each of the 19x19 cells contains information about 5 boxes.\n- You filter through all the boxes using non-max suppression. Specifically: \n - Score thresholding on the probability of detecting a class to keep only accurate (high probability) boxes\n - Intersection over Union (IoU) thresholding to eliminate overlapping boxes\n- Because training a YOLO model from randomly initialized weights is non-trivial and requires a large dataset as well as lot of computation, we used previously trained model parameters in this exercise. If you wish, you can also try fine-tuning the YOLO model with your own dataset, though this would be a fairly non-trivial exercise. ", "_____no_output_____" ], [ "**References**: The ideas presented in this notebook came primarily from the two YOLO papers. The implementation here also took significant inspiration and used many components from Allan Zelener's GitHub repository. The pre-trained weights used in this exercise came from the official YOLO website. \n- Joseph Redmon, Santosh Divvala, Ross Girshick, Ali Farhadi - [You Only Look Once: Unified, Real-Time Object Detection](https://arxiv.org/abs/1506.02640) (2015)\n- Joseph Redmon, Ali Farhadi - [YOLO9000: Better, Faster, Stronger](https://arxiv.org/abs/1612.08242) (2016)\n- Allan Zelener - [YAD2K: Yet Another Darknet 2 Keras](https://github.com/allanzelener/YAD2K)\n- The official YOLO website (https://pjreddie.com/darknet/yolo/) ", "_____no_output_____" ], [ "**Car detection dataset**:\n<a rel=\"license\" href=\"http://creativecommons.org/licenses/by/4.0/\"><img alt=\"Creative Commons License\" style=\"border-width:0\" src=\"https://i.creativecommons.org/l/by/4.0/88x31.png\" /></a><br /><span xmlns:dct=\"http://purl.org/dc/terms/\" property=\"dct:title\">The Drive.ai Sample Dataset</span> (provided by drive.ai) is licensed under a <a rel=\"license\" href=\"http://creativecommons.org/licenses/by/4.0/\">Creative Commons Attribution 4.0 International License</a>. We are grateful to Brody Huval, Chih Hu and Rahul Patel for providing this data. ", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ] ]
cbfe5f3e82eec0b50d643cd30d829be0e7883725
298,960
ipynb
Jupyter Notebook
99_helpers.ipynb
wooohoooo/thompson_sampling
259f4ba5d7ec023c5c48fa51e9d6c2ba0ee19fcc
[ "Apache-2.0" ]
1
2021-07-28T16:35:58.000Z
2021-07-28T16:35:58.000Z
99_helpers.ipynb
wooohoooo/thompson_sampling
259f4ba5d7ec023c5c48fa51e9d6c2ba0ee19fcc
[ "Apache-2.0" ]
2
2020-04-24T12:58:34.000Z
2022-02-26T07:03:05.000Z
99_helpers.ipynb
wooohoooo/thompson_sampling
259f4ba5d7ec023c5c48fa51e9d6c2ba0ee19fcc
[ "Apache-2.0" ]
null
null
null
478.336
206,484
0.917481
[ [ [ "# default_exp helpers", "_____no_output_____" ] ], [ [ "# helpers\n> this didn't fit anywhere else", "_____no_output_____" ] ], [ [ "#export\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n \nimport numpy as np\nimport matplotlib.pyplot as plt\n\n\n#ToDo: Propagate them through the methods\niters = 10\nl2 = 1\nn_std = 4\nfrom pygments import highlight\nfrom pygments.lexers import PythonLexer\nfrom pygments.formatters import HtmlFormatter\nimport IPython\n\n\ndef plot_regret(y_optimal_list,y_hat_list):\n y_optimal_array = np.array(y_optimal_list)\n y_hat_array = np.array(y_hat_list)\n regret_list = []\n\n\n regret = np.cumsum(y_optimal_array - y_hat_array)\n\n plt.plot(regret)\n \n \ndef showcase_code(pyfile,class_name = False, method_name = False, end_string = False):\n \"\"\"shows content of py file\"\"\"\n \n\n with open(pyfile) as f:\n code = f.read()\n\n if class_name:\n #1. find beginning (class + <name>)\n index = code.find(f'class {class_name}')\n code = code[index:]\n\n #2. find end (class (new class!) or end of script)\n end_index = code[7:].find('class')\n \n if method_name:\n #1. find beginning (class + <name>)\n index = code.find(f'def {method_name}')\n code = code[index:]\n\n #2. find end (class (new class!) or end of script)\n end_index = code[7:].find('def')\n\n \n if end_string:\n end_index = code[7:].find('# helpers')\n\n code = code[:end_index]\n\n formatter = HtmlFormatter()\n return IPython.display.HTML('<style type=\"text/css\">{}</style>{}'.format(\n formatter.get_style_defs('.highlight'),\n highlight(code, PythonLexer(), formatter)))\n", "\nBad key \"text.kerning_factor\" on line 4 in\n/home/thomas/anaconda3/envs/pytorch_GPU/lib/python3.7/site-packages/matplotlib/mpl-data/stylelib/_classic_test_patch.mplstyle.\nYou probably need to get an updated matplotlibrc file from\nhttps://github.com/matplotlib/matplotlib/blob/v3.1.3/matplotlibrc.template\nor from the matplotlib source distribution\n" ], [ "showcase_code('thompson_sampling/helpers.py',method_name='showcase_code')", "_____no_output_____" ], [ "showcase_code('thompson_sampling/solvers.py',class_name='BetaBandit', end_string = True)", "_____no_output_____" ], [ "#export\nimport scipy.stats as stats\n\ndef plot_online_logreg(online_lr, wee_x, wee_y):\n # closing other figures\n plt.close('all')\n plt.figure(figsize=[9,3.5], dpi=150)\n\n # let us check the distribution of weights and uncertainty bounds\n plt.figure(figsize=[9,3.5], dpi=150)\n\n # plotting the pdf of the weight distribution\n X_pdf = np.linspace(-4, 4, 1000)\n pdf = stats.norm(loc=online_lr.m, scale=online_lr.q**(-1.0)).pdf(X_pdf)\n\n # range and resolution of probability plot\n X_prob = np.linspace(-6, 6, 1000)\n p_dist = 1/(1 + np.exp(-X_prob * online_lr.m))\n p_dist_plus = 1/(1 + np.exp(-X_prob * (online_lr.m + 2*online_lr.q**(-1.0))))\n p_dist_minus = 1/(1 + np.exp(-X_prob * (online_lr.m - 2*online_lr.q**(-1.0))))\n\n # opening subplots\n ax1 = plt.subplot2grid((1, 5), (0, 0), colspan=2, rowspan=1)\n ax2 = plt.subplot2grid((1, 5), (0, 2), colspan=3, rowspan=1)\n\n # plotting distriution of weights\n ax1.plot(X_pdf, pdf, color='b', linewidth=2, alpha=0.5)\n #ax1.plot([cmab.weights[0][1], cmab.weights[0][1]], [0, max(pdf)], 'k--', label='True $\\\\beta$', linewidth=1)\n ax1.fill_between(X_pdf, pdf, 0, color='b', alpha=0.2)\n\n # plotting probabilities\n ax2.plot(X_prob, p_dist, color='b', linewidth=2, alpha=0.5)\n ax2.fill_between(X_prob, p_dist_plus, p_dist_minus, color='b', alpha=0.2)\n ax2.scatter(wee_x, wee_y, c='k')\n\n # title 
and comments\n ax1.set_title('OLR estimate for $\\\\beta$', fontsize=10)\n ax1.set_xlabel('$\\\\beta$', fontsize=10); ax1.set_ylabel('$density$', fontsize=10)\n ax2.set_title('OLR estimate for $\\\\theta(x)$', fontsize=10)\n ax2.set_xlabel('$x$', fontsize=10); ax2.set_ylabel('$\\\\theta(x)$', fontsize=10)\n\n ax1.legend(fontsize=10)\n plt.tight_layout()\n plt.show()", "_____no_output_____" ], [ "\nimport numpy as np\nfrom thompson_sampling.models import OnlineLogisticRegression, BatchBayesLinReg\nfrom thompson_sampling.multi_armed_bandits import contextual_categorical_bandit\n\ntheta = [0.6,1.0]\nnoise = 0.1\n\n\n\nwee_x = np.random.uniform(-6,6,10)\nwee_y = np.array([contextual_categorical_bandit(x,0, theta, noise)[0] for x in wee_x])\n\n\n\n\n# OLR object\nonline_lr = OnlineLogisticRegression(1, .5, 1)\nfor i in range(len(wee_y)):\n online_lr.fit(wee_x[i].reshape(-1,1), wee_y[i].reshape(-1,1))\n \n \n \n \nplot_online_logreg(online_lr, wee_x, wee_y)", "No handles with labels found to put in legend.\n" ], [ "#export\nfrom mpl_toolkits.axes_grid1 import ImageGrid\n\n\ndef prettify_ax(ax):\n ax.set_xlim(-1, 1)\n ax.set_ylim(-1, 1)\n ax.set_xlabel('$w_1$')\n ax.set_ylabel('$w_2$')\n return ax\n\n\ndef sample(n, weights):\n for _ in range(n):\n x = np.array([1, np.random.uniform(-1, 1)])\n y = np.dot(weights, x) + np.random.normal(0, .2)\n yield x, y\n \ndef sample(n, weights):\n X = np.array([[1, np.random.uniform(-1, 1)] for i in range(n)])\n y = [np.dot(weights, x) + np.random.normal(0, .2) for x in X]\n return X, y \n\n \n\n\ndef plot_param_through_time(model,N,n_samples,X,y,):\n\n w = np.linspace(-1, 1, 100)\n W = np.dstack(np.meshgrid(w, w))\n\n n_samples = 5\n fig = plt.figure(figsize=(7 * n_samples, 21))\n grid = ImageGrid(\n fig, 111, # similar to subplot(111)\n nrows_ncols=(n_samples, 3), # creates a n_samplesx3 grid of axes\n axes_pad=.5 # pad between axes in inch.\n )\n\n # We'll store the features and targets for plotting purposes\n xs = []\n ys = []\n\n\n\n\n\n for i, (xi, yi) in enumerate(zip(X,y)):\n\n pred_dist = model.predict(xi)\n\n # Prior weight distribution\n ax = prettify_ax(grid[3 * i])\n ax.set_title(f'Prior weight distribution #{i + 1}')\n ax.contourf(w, w, model.weights_dist.pdf(W), N, cmap='viridis')\n ax.scatter(*weights, color='red') # true weights the model has to find\n\n # Update model\n model.learn(xi, yi)\n\n # Prior weight distribution\n ax = prettify_ax(grid[3 * i + 1])\n ax.set_title(f'Posterior weight distribution #{i + 1}')\n ax.contourf(w, w, model.weights_dist.pdf(W), N, cmap='viridis')\n ax.scatter(*weights, color='red') # true weights the model has to find\n\n # Posterior target distribution\n xs.append(xi)\n ys.append(yi)\n posteriors = [model.predict(np.array([1, wi])) for wi in w]\n ax = prettify_ax(grid[3 * i + 2])\n ax.set_title(f'Posterior target distribution #{i + 1}')\n # Plot the old points and the new points\n ax.scatter([xi[1] for xi in xs[:-1]], ys[:-1])\n ax.scatter(xs[-1][1], ys[-1], marker='*')\n # Plot the predictive mean along with the predictive interval\n ax.plot(w, [p.mean() for p in posteriors], linestyle='--')\n cis = [p.interval(.95) for p in posteriors]\n ax.fill_between(\n x=w,\n y1=[ci[0] for ci in cis],\n y2=[ci[1] for ci in cis],\n alpha=.1\n )\n # Plot the true target distribution\n ax.plot(w, [np.dot(weights, [1, xi]) for xi in w], color='red')\n", "_____no_output_____" ], [ "def sample(n, weights):\n for _ in range(n):\n x = np.array([1, np.random.uniform(-1, 1)])\n y = np.dot(weights, x) + np.random.normal(0, .2)\n 
yield x, y\n \ndef sample(n, weights):\n X = np.array([[1, np.random.uniform(-1, 1)] for i in range(n)])\n y = [np.dot(weights, x) + np.random.normal(0, .2) for x in X]\n return X, y \n\n \nmodel = BatchBayesLinReg(n_features=2, alpha=2, beta=25)\n\nnp.random.seed(42)\n\n# Pick some true parameters that the model has to find\nweights = np.array([-.3, .5])\n \nn_samples = 5\nN = 100\n\nX,y = sample(n_samples, weights)\n\nplot_param_through_time(model,N,n_samples,X,y)", "_____no_output_____" ] ] ]
[ "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ] ]
cbfe808491ea66b23acdd27097691c1dbb5519b3
24,385
ipynb
Jupyter Notebook
dev/42_tabular_rapids.ipynb
ctombumila37/fastai_dev
141bffce2cf9f8a019e51199a6363338b135a1a9
[ "Apache-2.0" ]
null
null
null
dev/42_tabular_rapids.ipynb
ctombumila37/fastai_dev
141bffce2cf9f8a019e51199a6363338b135a1a9
[ "Apache-2.0" ]
null
null
null
dev/42_tabular_rapids.ipynb
ctombumila37/fastai_dev
141bffce2cf9f8a019e51199a6363338b135a1a9
[ "Apache-2.0" ]
null
null
null
31.024173
252
0.494525
[ [ [ "#export\nfrom local.torch_basics import *\nfrom local.test import *\nfrom local.core import *\nfrom local.data.all import *\nfrom local.tabular.core import *\ntry: import cudf,nvcategory\nexcept: print(\"This requires rapids, see https://rapids.ai/ for installation details\")", "_____no_output_____" ], [ "from local.notebook.showdoc import *", "_____no_output_____" ], [ "#default_exp tabular.rapids", "_____no_output_____" ] ], [ [ "# Tabular with rapids\n\n> Basic functions to preprocess tabular data before assembling it in a `DataBunch` on the GPU.", "_____no_output_____" ] ], [ [ "#export\n@patch\ndef __array__(self:cudf.DataFrame): return self.pandas().__array__()", "_____no_output_____" ], [ "#export\nclass TabularGPU(Tabular):\n def transform(self, cols, f): \n for c in cols: self[c] = f(self[c])\n \n def __getattr__(self,k):\n if isinstance(self.items, cudf.DataFrame) and k in self.items.columns: return self.items[k]\n return super().__getattr__(k)", "_____no_output_____" ] ], [ [ "## TabularProcessors", "_____no_output_____" ] ], [ [ "#export\ndef _to_str(c): return c if c.dtype == \"object\" else c.astype(\"str\")\ndef _remove_none(c):\n if None in c: c.remove(None)\n return c", "_____no_output_____" ], [ "#export\n@Categorify\ndef setups(self, to: TabularGPU):\n self.lbls = {n: nvcategory.from_strings(_to_str(to.iloc[:,n]).data).keys() for n in to.all_cat_names}\n self.classes = {n: CategoryMap(_remove_none(c.to_host()), add_na=(n in to.cat_names)) for n,c in self.lbls.items()}\n \n@patch\ndef _apply_cats_gpu(self: Categorify, c): \n return cudf.Series(nvcategory.from_strings(_to_str(c).data).set_keys(self.lbls[c.name]).values()).add(add)\n\n@Categorify\ndef encodes(self, to: TabularGPU): \n def _apply_cats_gpu(add, c): \n return cudf.Series(nvcategory.from_strings(_to_str(c).data).set_keys(self.lbls[c.name]).values()).add(add)\n to.transform(to.cat_names, partial(_apply_cats_gpu, 1))\n to.transform(L(to.cat_y), partial(_apply_cats_gpu, 0))", "_____no_output_____" ], [ "df = cudf.from_pandas(pd.DataFrame({'a':[0,1,2,0,2]}))\nto = TabularGPU(df, Categorify, 'a')\ncat = to.procs.categorify\ntest_eq(list(cat['a']), ['#na#','0','1','2'])\ntest_eq(to.a.to_array(), np.array([1,2,3,1,3]))\ndf1 = cudf.from_pandas(pd.DataFrame({'a':[1,0,3,-1,2]}))\nto1 = to.new(df1)\ncat(to1)\n#Values that weren't in the training df are sent to 0 (na)\ntest_eq(to1.a.to_array(), np.array([2,1,0,0,3]))", "_____no_output_____" ], [ "#Test decode\nto2 = TabularPandas(to1.items.to_pandas(), None, 'a')\nto2 = cat.decode(to2)\ntest_eq(to2.a, np.array(['1','0','#na#','#na#','2']))", "_____no_output_____" ], [ "df = cudf.from_pandas(pd.DataFrame({'a':[0,1,2,3,2]}))\nto = TabularGPU(df, Categorify, 'a', splits=[[0,1,2], [3,4]])\ncat = to.procs.categorify\ntest_eq(list(cat['a']), ['#na#','0','1','2'])\ntest_eq(to.a.to_array(), np.array([1,2,3,0,3]))", "_____no_output_____" ], [ "#TODO Categorical (fails for now)\n#df = cudf.from_pandas(pd.DataFrame({'a':pd.Categorical(['M','H','L','M'], categories=['H','M','L'], ordered=True)}))\n#to = TabularGPU(df, Categorify, 'a')\n#cat = to.procs.categorify\n#test_eq(cat['a'].to_host(), ['H','M','L'])\n#test_eq(df[\"a\"].to_array(), [2,1,3,2])", "_____no_output_____" ], [ "#export\n@Normalize\ndef setups(self, to: TabularGPU):\n self.means = {n: to.iloc[:,n].mean() for n in to.cont_names}\n self.stds = {n: to.iloc[:,n].std(ddof=0)+1e-7 for n in to.cont_names}\n\n@Normalize\ndef encodes(self, to: TabularGPU):\n to.transform(to.cont_names, lambda c: 
(c-self.means[c.name])/self.stds[c.name])", "_____no_output_____" ], [ "df = cudf.from_pandas(pd.DataFrame({'a':[0,1,2,3,4]}))\nto = TabularGPU(df, Normalize, cont_names='a')\nnorm = to.procs.normalize\nx = np.array([0,1,2,3,4])\nm,s = x.mean(),x.std()\ntest_eq(norm.means['a'], m)\ntest_close(norm.stds['a'], s)\ntest_close(to.a.to_array(), (x-m)/s)\ndf1 = cudf.from_pandas(pd.DataFrame({'a':[5,6,7]}))\nto1 = to.new(df1)\nnorm(to1)\ntest_close(to1.a.to_array(), (np.array([5,6,7])-m)/s)\n\nto2 = TabularPandas(to1.items.to_pandas(), None, cont_names='a')\nto2 = norm.decode(to2)\ntest_close(to2.a, [5,6,7])", "_____no_output_____" ], [ "df = cudf.from_pandas(pd.DataFrame({'a':[0,1,2,3,4]}))\nto = TabularGPU(df, Normalize, cont_names='a', splits=[[0,1,2], [3,4]])\nnorm = to.procs.normalize\n\nx = np.array([0,1,2])\nm,s = x.mean(),x.std()\ntest_eq(norm.means, {'a': m})\ntest_close(norm.stds['a'], s)\ntest_close(to.a.to_array(), (np.array([0,1,2,3,4])-m)/s)", "_____no_output_____" ], [ "#export\n@patch\ndef median(self:cudf.Series):\n \"Get the median of `self`\"\n col = self.dropna().reset_index(drop=True).sort_values()\n return col[len(col)//2] if len(col)%2 != 0 else (col[len(col)//2]+col[len(col)//2-1])/2", "_____no_output_____" ], [ "col = cudf.Series([0,1,np.nan,1,2,3,4])\ntest_eq(col.median(), 1.5)\ncol = cudf.Series([np.nan,1,np.nan,1,2,3,4])\ntest_eq(col.median(), 2)", "_____no_output_____" ], [ "#export\n@patch\ndef idxmax(self:cudf.Series):\n \"Return the index of the first occurence of the max in `self`\"\n return self.argsort(ascending=False).index[0]", "_____no_output_____" ], [ "#export\n@FillMissing\ndef setups(self, to: TabularGPU):\n self.na_dict = {}\n for n in to.cont_names:\n col = to.iloc[:, n]\n if col.isnull().any(): self.na_dict[n] = self.fill_strategy(col, self.fill_vals[n])\n\n@FillMissing\ndef encodes(self, to: TabularGPU):\n for n in to.cont_names:\n if n in self.na_dict:\n if self.add_col:\n to.items[n+'_na'] = to[n].isnull()\n if n+'_na' not in to.cat_names: to.cat_names.append(n+'_na')\n to[n] = to[n].fillna(self.na_dict[n])\n elif df[n].isnull().any():\n raise Exception(f\"nan values in `{n}` but not in setup training set\")", "_____no_output_____" ], [ "fill1,fill2,fill3 = (FillMissing(fill_strategy=s) \n for s in [FillStrategy.median, FillStrategy.constant, FillStrategy.mode])\ndf = cudf.from_pandas(pd.DataFrame({'a':[0,1,np.nan,1,2,3,4]}))\ndf1 = df.copy(); df2 = df.copy()\ntos = TabularGPU(df, fill1, cont_names='a'),TabularGPU(df1, fill2, cont_names='a'),TabularGPU(df2, fill3, cont_names='a')\n\ntest_eq(fill1.na_dict, {'a': 1.5})\ntest_eq(fill2.na_dict, {'a': 0})\ntest_eq(fill3.na_dict, {'a': 1.0})\n\nfor t in tos: test_eq(t.cat_names, ['a_na'])\n\nfor to_,v in zip(tos, [1.5, 0., 1.]):\n test_eq(to_.a.to_array(), np.array([0, 1, v, 1, 2, 3, 4]))\n test_eq(to_.a_na.to_array(), np.array([0, 0, 1, 0, 0, 0, 0]))", "_____no_output_____" ], [ "dfa = cudf.from_pandas(pd.DataFrame({'a':[np.nan,0,np.nan]}))\ntos = [t.new(o) for t,o in zip(tos,(dfa,dfa.copy(),dfa.copy()))]\nfor t in tos: t.process()\nfor to_,v in zip(tos, [1.5, 0., 1.]):\n test_eq(to_.a.to_array(), np.array([v, 0, v]))\n test_eq(to_.a_na.to_array(), np.array([1, 0, 1]))", "_____no_output_____" ] ], [ [ "## Tabular Pipelines -", "_____no_output_____" ] ], [ [ "procs = [Normalize, Categorify, FillMissing, noop]\ndf = cudf.from_pandas(pd.DataFrame({'a':[0,1,2,1,1,2,0], 'b':[0,1,np.nan,1,2,3,4]}))\nto = TabularGPU(df, procs, cat_names='a', cont_names='b')\n\n#Test setup and apply on 
df_trn\ntest_eq(to.a.to_array(), [1,2,3,2,2,3,1])\ntest_eq(to.b_na.to_array(), [1,1,2,1,1,1,1])\nx = np.array([0,1,1.5,1,2,3,4])\nm,s = x.mean(),x.std()\ntest_close(to.b.to_array(), (x-m)/s)\ntest_eq(to.procs.classes, {'a': ['#na#','0','1','2'], 'b_na': ['#na#','False','True']})", "_____no_output_____" ], [ "#Test apply on y_names\nprocs = [Normalize, Categorify, FillMissing, noop]\ndf = cudf.from_pandas(pd.DataFrame({'a':[0,1,2,1,1,2,0], 'b':[0,1,np.nan,1,2,3,4], 'c': ['b','a','b','a','a','b','a']}))\nto = TabularGPU(df, procs, cat_names='a', cont_names='b', y_names='c')\n\ntest_eq(to.cat_names, ['a', 'b_na'])\ntest_eq(to.a.to_array(), [1,2,3,2,2,3,1])\ntest_eq(to.b_na.to_array(), [1,1,2,1,1,1,1])\ntest_eq(to.c.to_array(), [1,0,1,0,0,1,0])\nx = np.array([0,1,1.5,1,2,3,4])\nm,s = x.mean(),x.std()\ntest_close(to.b.to_array(), (x-m)/s)\ntest_eq(to.procs.classes, {'a': ['#na#','0','1','2'], 'b_na': ['#na#','False','True'], 'c': ['a','b']})", "_____no_output_____" ], [ "procs = [Normalize, Categorify, FillMissing, noop]\ndf = cudf.from_pandas(pd.DataFrame({'a':[0,1,2,1,1,2,0], 'b':[0,1,np.nan,1,2,3,4], 'c': ['b','a','b','a','a','b','a']}))\nto = TabularGPU(df, procs, cat_names='a', cont_names='b', y_names='c')\n\ntest_eq(to.cat_names, ['a', 'b_na'])\ntest_eq(to.a.to_array(), [1,2,3,2,2,3,1])\ntest_eq(to.a.dtype,int)\ntest_eq(to.b_na.to_array(), [1,1,2,1,1,1,1])\ntest_eq(to.c.to_array(), [1,0,1,0,0,1,0])", "_____no_output_____" ], [ "procs = [Normalize, Categorify, FillMissing, noop]\ndf = cudf.from_pandas(pd.DataFrame({'a':[0,1,2,1,1,2,0], 'b':[0,np.nan,1,1,2,3,4], 'c': ['b','a','b','a','a','b','a']}))\nto = TabularGPU(df, procs, cat_names='a', cont_names='b', y_names='c', splits=[[0,1,4,6], [2,3,5]])\n\ntest_eq(to.cat_names, ['a', 'b_na'])\ntest_eq(to.a.to_array(), [1,2,2,1,0,2,0])\ntest_eq(to.a.dtype,int)\ntest_eq(to.b_na.to_array(), [1,2,1,1,1,1,1])\ntest_eq(to.c.to_array(), [1,0,0,0,1,0,1])", "_____no_output_____" ], [ "#export\nfrom torch.utils.dlpack import from_dlpack\n\n@ReadTabBatch\ndef encodes(self, to: TabularGPU):\n return from_dlpack(to.cats.to_dlpack()).long(),from_dlpack(to.conts.to_dlpack()).float(), from_dlpack(to.targ.to_dlpack()).long()", "_____no_output_____" ] ], [ [ "## Integration example", "_____no_output_____" ] ], [ [ "path = untar_data(URLs.ADULT_SAMPLE)\ndf = cudf.from_pandas(pd.read_csv(path/'adult.csv'))\ndf_trn,df_tst = df.iloc[:10000].copy(),df.iloc[10000:].copy()\ndf_trn.head()", "_____no_output_____" ], [ "cat_names = ['workclass', 'education', 'marital-status', 'occupation', 'relationship', 'race']\ncont_names = ['age', 'fnlwgt', 'education-num']\nprocs = [Categorify, FillMissing, Normalize]\n\nsplits = RandomSplitter()(range_of(df_trn))", "_____no_output_____" ], [ "%time to = TabularGPU(df_trn, procs, splits=splits, cat_names=cat_names, cont_names=cont_names, y_names=\"salary\")", "CPU times: user 755 ms, sys: 36.7 ms, total: 792 ms\nWall time: 798 ms\n" ], [ "splits = [list(range(len(splits[0]))), list(range(len(splits[0]), 10000))]\ndsrc = DataSource(to, splits=splits, tfms=[None])\ndl = TabDataLoader(to.valid, bs=64, num_workers=0)", "_____no_output_____" ], [ "dl.show_batch()", "/home/sgugger/anaconda3/lib/python3.7/site-packages/cudf/io/dlpack.py:83: UserWarning: WARNING: cuDF to_dlpack() produces column-major (Fortran order) output. 
If the output tensor needs to be row major, transpose the output of this function.\n return cpp_dlpack.to_dlpack(gdf_cols)\n" ] ], [ [ "## Export -", "_____no_output_____" ] ], [ [ "#hide\nfrom local.notebook.export import notebook2script\nnotebook2script(all_fs=True)", "Converted 00_test.ipynb.\nConverted 01_core.ipynb.\nConverted 01a_utils.ipynb.\nConverted 01b_dispatch.ipynb.\nConverted 01c_torch_core.ipynb.\nConverted 02_script.ipynb.\nConverted 03_dataloader.ipynb.\nConverted 04_transform.ipynb.\nConverted 05_data_core.ipynb.\nConverted 06_data_transforms.ipynb.\nConverted 07_vision_core.ipynb.\nConverted 08_pets_tutorial.ipynb.\nConverted 09_vision_augment.ipynb.\nConverted 10_data_block.ipynb.\nConverted 11_layers.ipynb.\nConverted 11a_vision_models_xresnet.ipynb.\nConverted 12_optimizer.ipynb.\nConverted 13_learner.ipynb.\nConverted 14_callback_schedule.ipynb.\nConverted 14a_callback_data.ipynb.\nConverted 15_callback_hook.ipynb.\nConverted 15a_vision_models_unet.ipynb.\nConverted 16_callback_progress.ipynb.\nConverted 17_callback_tracker.ipynb.\nConverted 18_callback_fp16.ipynb.\nConverted 19_callback_mixup.ipynb.\nConverted 20_metrics.ipynb.\nConverted 21_vision_learner.ipynb.\nConverted 22_tutorial_imagenette.ipynb.\nConverted 23_tutorial_transfer_learning.ipynb.\nConverted 30_text_core.ipynb.\nConverted 31_text_data.ipynb.\nConverted 32_text_models_awdlstm.ipynb.\nConverted 33_text_models_core.ipynb.\nConverted 34_callback_rnn.ipynb.\nConverted 35_tutorial_wikitext.ipynb.\nConverted 36_text_models_qrnn.ipynb.\nConverted 37_text_learner.ipynb.\nConverted 38_tutorial_ulmfit.ipynb.\nConverted 40_tabular_core.ipynb.\nConverted 41_tabular_model.ipynb.\nConverted 42_tabular_rapids.ipynb.\nConverted 50_data_block_examples.ipynb.\nConverted 60_medical_imaging.ipynb.\nConverted 90_notebook_core.ipynb.\nConverted 91_notebook_export.ipynb.\nConverted 92_notebook_showdoc.ipynb.\nConverted 93_notebook_export2html.ipynb.\nConverted 94_notebook_test.ipynb.\nConverted 95_index.ipynb.\nConverted 96_data_external.ipynb.\nConverted 97_utils_test.ipynb.\nConverted notebook2jekyll.ipynb.\n" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ] ]
cbfe871d9e3b1dc964b724e13381c78939f052c3
187,323
ipynb
Jupyter Notebook
Prediction_all_features.ipynb
Yidi0221/TalkingData-Ad-Fraud-Detection
e82c99df543599f03f04ff13fedc40e5ac3fd170
[ "MIT" ]
null
null
null
Prediction_all_features.ipynb
Yidi0221/TalkingData-Ad-Fraud-Detection
e82c99df543599f03f04ff13fedc40e5ac3fd170
[ "MIT" ]
null
null
null
Prediction_all_features.ipynb
Yidi0221/TalkingData-Ad-Fraud-Detection
e82c99df543599f03f04ff13fedc40e5ac3fd170
[ "MIT" ]
null
null
null
65.982036
31,042
0.645943
[ [ [ "# Inspirational Notebooks\n### Generating new festures according to these notebooks", "_____no_output_____" ], [ "* https://www.kaggle.com/nuhsikander/lgbm-new-features-corrected\n* https://www.kaggle.com/rteja1113/lightgbm-with-count-features\n* https://www.kaggle.com/aharless/swetha-s-xgboost-revised\n* https://www.kaggle.com/bk0000/non-blending-lightgbm-model-lb-0-977", "_____no_output_____" ], [ "# Load Data", "_____no_output_____" ] ], [ [ "%matplotlib inline \nimport matplotlib.pyplot as plt \n\nimport math\nimport numpy as np\nimport pandas as pd\nimport seaborn as sns\nfrom pandas import read_csv\nimport sklearn\nfrom sklearn import preprocessing\nfrom sklearn.preprocessing import MinMaxScaler\nfrom sklearn.metrics import mean_squared_error\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.cross_validation import train_test_split\n\nimport warnings\nwarnings.filterwarnings('ignore')\n\nplt.rc(\"font\", size=14)\nsns.set(style=\"white\")\nsns.set(style=\"whitegrid\", color_codes=True)", "/Users/PP/Documents/7250/Python/anaconda/lib/python3.5/importlib/_bootstrap.py:222: RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88\n return f(*args, **kwds)\n/Users/PP/Documents/7250/Python/anaconda/lib/python3.5/importlib/_bootstrap.py:222: RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88\n return f(*args, **kwds)\n/Users/PP/Documents/7250/Python/anaconda/lib/python3.5/importlib/_bootstrap.py:222: RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88\n return f(*args, **kwds)\n/Users/PP/Documents/7250/Python/anaconda/lib/python3.5/importlib/_bootstrap.py:222: RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88\n return f(*args, **kwds)\n/Users/PP/Documents/7250/Python/anaconda/lib/python3.5/site-packages/sklearn/cross_validation.py:41: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. 
This module will be removed in 0.20.\n \"This module will be removed in 0.20.\", DeprecationWarning)\n" ], [ "import keras\nfrom keras.datasets import mnist\nfrom keras.models import Sequential\nfrom keras.layers import Dense\nfrom keras.optimizers import SGD\nfrom keras.optimizers import RMSprop\nfrom keras.optimizers import Adagrad\nfrom keras.optimizers import Adadelta", "Using TensorFlow backend.\n" ], [ "# df_train= pd.read_csv('data/train.csv',nrows=200000, parse_dates=['click_time'])\ndf_train = pd.read_csv('data/train_new_cols.csv',nrows=200000) #train data subset, original too large\ndf_train.dropna()\ndf_train.info()", "<class 'pandas.core.frame.DataFrame'>\nRangeIndex: 200000 entries, 0 to 199999\nData columns (total 57 columns):\nUnnamed: 0 200000 non-null int64\nip 200000 non-null int64\napp 200000 non-null int64\ndevice 200000 non-null int64\nos 200000 non-null int64\nchannel 200000 non-null int64\nclick_time 200000 non-null object\nattributed_time 348 non-null object\nis_attributed 200000 non-null int64\nday 200000 non-null int64\nhour 200000 non-null int64\nminute 200000 non-null int64\nsecond 200000 non-null int64\nip_confRate 200000 non-null float64\napp_confRate 200000 non-null float64\ndevice_confRate 200000 non-null float64\nos_confRate 200000 non-null float64\nchannel_confRate 200000 non-null float64\napp_channel_confRate 200000 non-null float64\napp_os_confRate 200000 non-null float64\napp_device_confRate 200000 non-null float64\nchannel_os_confRate 200000 non-null float64\nchannel_device_confRate 200000 non-null float64\nos_device_confRate 200000 non-null float64\nip_app_channel_var_day 134217 non-null float64\nip_app_os_var_hour 139465 non-null float64\nip_day_channel_var_hour_x 151335 non-null float64\nip_day_hour_count_channel 200000 non-null int64\nip_app_count_channel 200000 non-null int64\nip_app_os_count_channel 200000 non-null int64\nip_app_day_hour_count_channel 200000 non-null int64\nip_app_channel_mean_hour 200000 non-null float64\napp_AvgViewPerDistinct_ip 200000 non-null float64\napp_count_channel 200000 non-null int64\nchannel_count_app 200000 non-null int64\nip_nunique_channel 200000 non-null int64\nip_nunique_app 200000 non-null int64\nip_day_nunique_hour 200000 non-null int64\nip_app_nunique_os 200000 non-null int64\nip_nunique_device 200000 non-null int64\napp_nunique_channel 200000 non-null int64\nip_device_os_nunique_app 200000 non-null int64\nip_device_os_cumcount_app 200000 non-null int64\nip_cumcount_app 200000 non-null int64\nip_cumcount_os 200000 non-null int64\nip_day_channel_var_hour_y 151335 non-null float64\nip_nextClick 197456 non-null float64\nip_app_nextClick 163726 non-null float64\nip_channel_nextClick 138039 non-null float64\nip_os_nextClick 182531 non-null float64\nip_app_device_os_channel_nextClick 86990 non-null float64\nip_os_device_nextClick 181508 non-null float64\nip_os_device_app_nextClick 120449 non-null float64\nprev_identical_clicks 200000 non-null int64\nfuture_identical_clicks 200000 non-null int64\nprev_app_clicks 200000 non-null int64\nfuture_app_clicks 200000 non-null int64\ndtypes: float64(24), int64(31), object(2)\nmemory usage: 87.0+ MB\n" ], [ "df_train.columns.values", "_____no_output_____" ], [ "cols = ['ip', 'app', 'device', 'os', 'channel', 'click_time',\n 'attributed_time', 'is_attributed', 'day', 'hour', 'minute',\n 'second', 'ip_confRate', 'app_confRate', 'device_confRate',\n 'os_confRate', 'channel_confRate', 'app_channel_confRate',\n 'app_os_confRate', 'app_device_confRate', 'channel_os_confRate',\n 
'channel_device_confRate', 'os_device_confRate',\n 'ip_app_channel_var_day', 'ip_app_os_var_hour',\n 'ip_day_channel_var_hour_x', 'ip_day_hour_count_channel',\n 'ip_app_count_channel', 'ip_app_os_count_channel',\n 'ip_app_day_hour_count_channel', 'ip_app_channel_mean_hour',\n 'app_AvgViewPerDistinct_ip', 'app_count_channel',\n 'channel_count_app', 'ip_nunique_channel', 'ip_nunique_app',\n 'ip_day_nunique_hour', 'ip_app_nunique_os', 'ip_nunique_device',\n 'app_nunique_channel', 'ip_device_os_nunique_app',\n 'ip_device_os_cumcount_app', 'ip_cumcount_app', 'ip_cumcount_os',\n 'ip_day_channel_var_hour_y', 'ip_nextClick', 'ip_app_nextClick',\n 'ip_channel_nextClick', 'ip_os_nextClick',\n 'ip_app_device_os_channel_nextClick', 'ip_os_device_nextClick',\n 'ip_os_device_app_nextClick', 'prev_identical_clicks',\n 'future_identical_clicks', 'prev_app_clicks', 'future_app_clicks']\ndf_train = df_train.loc[:,cols]", "_____no_output_____" ], [ "df_train.head(10)", "_____no_output_____" ], [ "df_train['is_attributed'].value_counts()", "_____no_output_____" ], [ "sns.countplot(x='is_attributed', data=df_train, palette='hls')\nplt.show()", "_____no_output_____" ], [ "# The ratio of df_train to df_test is 0.8 to 0.2 or 0.75 to 0.25\ndf_test = pd.read_csv('data/train_new_cols.csv', nrows=50000,skiprows=range(1, 400000))\ndf_test.dropna()\ndf_test.info()", "<class 'pandas.core.frame.DataFrame'>\nRangeIndex: 50000 entries, 0 to 49999\nData columns (total 57 columns):\nUnnamed: 0 50000 non-null int64\nip 50000 non-null int64\napp 50000 non-null int64\ndevice 50000 non-null int64\nos 50000 non-null int64\nchannel 50000 non-null int64\nclick_time 50000 non-null object\nattributed_time 88 non-null object\nis_attributed 50000 non-null int64\nday 50000 non-null int64\nhour 50000 non-null int64\nminute 50000 non-null int64\nsecond 50000 non-null int64\nip_confRate 50000 non-null float64\napp_confRate 50000 non-null float64\ndevice_confRate 50000 non-null float64\nos_confRate 50000 non-null float64\nchannel_confRate 50000 non-null float64\napp_channel_confRate 50000 non-null float64\napp_os_confRate 50000 non-null float64\napp_device_confRate 50000 non-null float64\nchannel_os_confRate 50000 non-null float64\nchannel_device_confRate 50000 non-null float64\nos_device_confRate 50000 non-null float64\nip_app_channel_var_day 34970 non-null float64\nip_app_os_var_hour 36809 non-null float64\nip_day_channel_var_hour_x 38512 non-null float64\nip_day_hour_count_channel 50000 non-null int64\nip_app_count_channel 50000 non-null int64\nip_app_os_count_channel 50000 non-null int64\nip_app_day_hour_count_channel 50000 non-null int64\nip_app_channel_mean_hour 50000 non-null float64\napp_AvgViewPerDistinct_ip 50000 non-null float64\napp_count_channel 50000 non-null int64\nchannel_count_app 50000 non-null int64\nip_nunique_channel 50000 non-null int64\nip_nunique_app 50000 non-null int64\nip_day_nunique_hour 50000 non-null int64\nip_app_nunique_os 50000 non-null int64\nip_nunique_device 50000 non-null int64\napp_nunique_channel 50000 non-null int64\nip_device_os_nunique_app 50000 non-null int64\nip_device_os_cumcount_app 50000 non-null int64\nip_cumcount_app 50000 non-null int64\nip_cumcount_os 50000 non-null int64\nip_day_channel_var_hour_y 38512 non-null float64\nip_nextClick 48959 non-null float64\nip_app_nextClick 39531 non-null float64\nip_channel_nextClick 32900 non-null float64\nip_os_nextClick 44728 non-null float64\nip_app_device_os_channel_nextClick 21355 non-null float64\nip_os_device_nextClick 44410 non-null 
float64\nip_os_device_app_nextClick 29197 non-null float64\nprev_identical_clicks 50000 non-null int64\nfuture_identical_clicks 50000 non-null int64\nprev_app_clicks 50000 non-null int64\nfuture_app_clicks 50000 non-null int64\ndtypes: float64(24), int64(31), object(2)\nmemory usage: 21.7+ MB\n" ], [ "df_test = df_test.loc[:,cols]", "_____no_output_____" ], [ "df_test.head(10)", "_____no_output_____" ], [ "df_test['is_attributed'].value_counts()", "_____no_output_____" ], [ "sns.countplot(x='is_attributed', data=df_test, palette='hls')\nplt.show()", "_____no_output_____" ], [ "# Get columes names except the click_time (object), attributed_time (object) and is_attributed\ntrain_cols = []\nfor each_value in df_train.columns.values:\n if each_value == 'click_time' or each_value == 'attributed_time' or each_value == 'is_attributed':\n continue\n train_cols.append(each_value)\ntrain_cols", "_____no_output_____" ], [ "# Data_X\nX_train = df_train.loc[:,train_cols]\nX_test = df_test.loc[:,train_cols]", "_____no_output_____" ], [ "X_train.shape", "_____no_output_____" ], [ "X_test.shape", "_____no_output_____" ], [ "# Data_Y\ny_train = df_train[['is_attributed']]\ny_test = df_test[['is_attributed']]", "_____no_output_____" ], [ "y_train.shape", "_____no_output_____" ], [ "y_test.shape", "_____no_output_____" ], [ "y_test_val = y_test.values", "_____no_output_____" ] ], [ [ "# Algorithms", "_____no_output_____" ], [ "### 1. Random Forest", "_____no_output_____" ] ], [ [ "from sklearn.ensemble import RandomForestClassifier", "_____no_output_____" ], [ "def randomForest_pre(X_train, y_train, X_test, y_test):\n X_train = np.nan_to_num(X_train)\n y_train = np.nan_to_num(y_train)\n X_test = np.nan_to_num(X_test)\n y_test = np.nan_to_num(y_test)\n RF_model = RandomForestClassifier()\n # Train the model\n RF_fit = RF_model.fit(X_train, y_train)\n # Get the prediction of the test data\n RF_predict = RF_model.predict(X_test)\n # Compare the prediction with the known values\n RF_acc = sklearn.metrics.accuracy_score(np.array(RF_predict)[:], \n np.array(y_test_val)[:])\n # Plot the data\n plt.figure(figsize=(10,5))\n plt.plot(RF_predict, color='red', label='Prediction')\n plt.plot(y_test, label='Y_test')\n plt.legend(['Prediction', 'Y_test'])\n _ = plt.ylim()\n \n return RF_fit, RF_predict, RF_acc, RF_model", "_____no_output_____" ], [ "# RF_fit, RF_pre, RF_acc, rf_model = randomForest_pre(X_train, y_train, X_test, y_test)\n# RF_acc", "_____no_output_____" ] ], [ [ "### 2. 
Gradient Boosting & AdaBoost", "_____no_output_____" ] ], [ [ "from sklearn.ensemble import GradientBoostingClassifier", "_____no_output_____" ], [ "def gradientBoosting_pre(X_train, y_train, X_test, y_test):\n X_train = np.nan_to_num(X_train)\n y_train = np.nan_to_num(y_train)\n X_test = np.nan_to_num(X_test)\n y_test = np.nan_to_num(y_test)\n GB_model = GradientBoostingClassifier(n_estimators=100, learning_rate=1.0, max_depth=1, random_state=0)\n # Train the model\n GB_fit = GB_model.fit(X_train, y_train)\n # Get the prediction of the test data\n GB_predict = GB_model.predict(X_test)\n # Compare the prediction with the known values\n GB_acc = sklearn.metrics.accuracy_score(np.array(GB_predict)[:], \n np.array(y_test_val)[:])\n \n # Plot the data\n plt.figure(figsize=(10,5))\n plt.plot(GB_predict, color='red', label='Prediction')\n plt.plot(y_test, label='Y_test')\n plt.legend(['Prediction', 'Y_test'])\n _ = plt.ylim()\n \n return GB_fit, GB_predict, GB_acc, GB_model", "_____no_output_____" ], [ "# GB_fit, GB_predict, GB_acc, gb_model = gradientBoosting_pre(X_train, y_train, X_test, y_test)\n# GB_acc", "_____no_output_____" ] ], [ [ "### 3. Logistic Regression", "_____no_output_____" ] ], [ [ "from sklearn.linear_model import LogisticRegression", "_____no_output_____" ], [ "def logisticRegression_pre(X_train, y_train, X_test, y_test):\n X_train = np.nan_to_num(X_train)\n y_train = np.nan_to_num(y_train)\n X_test = np.nan_to_num(X_test)\n y_test = np.nan_to_num(y_test)\n LG_model = LogisticRegression()\n # Train the model\n LG_fit = LG_model.fit(X_train, y_train)\n # Get the prediction of the test data\n LG_predict = LG_model.predict(X_test)\n # Compare the prediction with the known values\n LG_acc = sklearn.metrics.accuracy_score(np.array(LG_predict)[:], \n np.array(y_test_val)[:])\n \n return LG_fit, LG_predict, LG_acc, LG_model", "_____no_output_____" ], [ "# LG_fit, LG_predict, LG_acc, lg_model = logisticRegression_pre(X_train, y_train, X_test, y_test)\n# LG_acc", "_____no_output_____" ] ], [ [ "### 4. SVM - Support Vector Machine", "_____no_output_____" ] ], [ [ "from sklearn import svm", "_____no_output_____" ], [ "def svm_pre(X_train, y_train, X_test, y_test):\n X_train = np.nan_to_num(X_train)\n y_train = np.nan_to_num(y_train)\n X_test = np.nan_to_num(X_test)\n y_test = np.nan_to_num(y_test)\n svm_model = svm.SVC()\n uniq = np.unique(y_train[9000:10000])\n # Train the model\n SVM_fit = svm_model.fit(X_train[9000:10000], y_train[9000:10000])\n # Get the prediction of the test data\n SVM_predict = svm_model.predict(X_test)\n # Compare the prediction with the known values\n SVM_acc = sklearn.metrics.accuracy_score(np.array(LG_predict)[:], \n np.array(y_test_val)[:])\n return SVM_fit, SVM_predict, SVM_acc, svm_model", "_____no_output_____" ], [ "# svm_fit, svm_predict, svm_acc, svm_model = svm_pre(X_train, y_train, X_test, y_test)\n# svm_acc", "_____no_output_____" ] ], [ [ "### 5. 
ANN - Artificial Neural Network", "_____no_output_____" ] ], [ [ "def shallow_net_A(n=55,i=len(train_cols),o=2):\n # Create simple one dense layer net\n # Default 55 neurons, input 5, output 2\n net = Sequential()\n net.add(Dense(n, activation='sigmoid', input_shape=(i,)))\n net.add(Dense(2, activation='softmax'))\n # Compile net\n net.compile(loss='mean_squared_error', optimizer=SGD(lr=0.01), metrics=['accuracy'])\n return net", "_____no_output_____" ], [ "def ann_pre(X_train, y_train, X_test, y_test):\n ann_model = shallow_net_A()\n ann_summary = ann_model.summary()\n # Convert the values\n X_train_ann = np.nan_to_num(X_train)\n y_train_ann = np.nan_to_num(y_train)\n X_test_ann = np.nan_to_num(X_test)\n y_test_ann = np.nan_to_num(y_test)\n # Conver the matrix, finally we have two classes (n_classes), the original one has oly one class\n n_classes = 2\n y_train_ann = keras.utils.to_categorical(y_train_ann, n_classes)\n y_test_ann = keras.utils.to_categorical(y_test_ann, n_classes)\n # Training the model\n ann_fit = ann_model.fit(X_train_ann, y_train_ann, batch_size=128, epochs=99, verbose=1, validation_data=(X_test_ann, y_test_ann))\n \n # Evaluate: loss & accuracy -> Using Evaluation to get the accracy\n ann_evaluate = ann_model.evaluate(X_test_ann, y_test_ann)\n \n # Using prediction\n ann_pre = ann_model.predict(X_test)\n # Convert value to boolean value\n y_pre = (ann_pre > 0.5)\n # Counting the boolean value, counting the accuracy by using basic calculation\n from sklearn.metrics import confusion_matrix\n ann_output = confusion_matrix(y_test_ann.argmax(axis=1), y_pre.argmax(axis=1))\n ann_prediction_acc = ann_output[0][0]/(ann_output[0][0]+ann_output[1][0])\n \n return ann_summary, ann_fit, ann_evaluate, ann_prediction_acc, ann_model", "_____no_output_____" ], [ "# ann_summary, ann_fit, ann_evaluate, ann_prediction_acc, ann_model = ann_pre(X_train, y_train, X_test, y_test)", "_____no_output_____" ], [ "# ann_prediction_acc", "_____no_output_____" ] ], [ [ "### 6. 
MLP - Multi-layered Neural Network", "_____no_output_____" ] ], [ [ "def shallow_net_C(n=55,i=len(train_cols),o=2):\n # Create simple one dense layer net\n # Default 55 neurons, input 5, output 2, here we have more hidden layers with different activation\n net = Sequential()\n net.add(Dense(n, activation='sigmoid', input_shape=(i,)))\n net.add(Dense(n, activation='relu', input_shape=(i,)))\n net.add(Dense(n, activation='tanh', input_shape=(i,)))\n net.add(Dense(n, activation='elu', input_shape=(i,)))\n net.add(Dense(2, activation='softmax'))\n # Compile net\n net.compile(loss='mean_squared_error', optimizer=SGD(lr=0.01), metrics=['accuracy'])\n return net", "_____no_output_____" ], [ "def mlp_pre(X_train, y_train, X_test, y_test):\n mlp_model = shallow_net_C()\n mlp_summary = mlp_model.summary()\n # Convert the values\n X_train_mlp = np.nan_to_num(X_train)\n y_train_mlp = np.nan_to_num(y_train)\n X_test_mlp = np.nan_to_num(X_test)\n y_test_mlp = np.nan_to_num(y_test)\n # Conver the matrix, finally we have two classes (n_classes)\n n_classes = 2\n y_train_mlp = keras.utils.to_categorical(y_train_mlp, n_classes)\n y_test_mlp = keras.utils.to_categorical(y_test_mlp, n_classes)\n # Training the model\n mlp_fit = mlp_model.fit(X_train_mlp, y_train_mlp, batch_size=128, epochs=99, verbose=1, validation_data=(X_test_mlp, y_test_mlp))\n \n # Evaluate: loss & accuracy -> Using Evaluation to get the accracy\n mlp_evaluate = mlp_model.evaluate(X_test_mlp, y_test_mlp)\n \n # Using prediction\n mlp_pre = mlp_model.predict(X_test)\n # Convert value to boolean value\n y_pre = (mlp_pre > 0.5)\n # Counting the boolean value, counting the accuracy by using basic calculation\n from sklearn.metrics import confusion_matrix\n mlp_output = confusion_matrix(y_test_mlp.argmax(axis=1), y_pre.argmax(axis=1))\n mlp_prediction_acc = mlp_output[0][0]/(mlp_output[0][0]+mlp_output[1][0])\n \n return mlp_summary, mlp_fit, mlp_evaluate, mlp_prediction_acc, mlp_model", "_____no_output_____" ], [ "# mlp_summary, mlp_fit, mlp_evaluate, mlp_prediction_acc, mlp_model = mlp_pre(X_train, y_train, X_test, y_test)", "_____no_output_____" ], [ "# mlp_prediction_acc", "_____no_output_____" ] ], [ [ "### 7. 
RNN - Recurrent Neural Network (LSTM)", "_____no_output_____" ] ], [ [ "from keras.layers import LSTM", "_____no_output_____" ], [ "def rnn_pre(df_train, train_rate=0.75):\n # Set the dataset for train and test\n df_rnn_train = df_train.loc[:,train_cols + ['is_attributed']]\n df_rnn_test = df_test.loc[:,train_cols + ['is_attributed']]\n df_rnn = df_rnn_train.append(df_rnn_test)\n \n pre_col_index = list(df_rnn_train).index('is_attributed')\n dataset = df_rnn.values.astype('float32')\n dataset = np.nan_to_num(dataset)\n \n # Normalize the dataset, set all the data of the dataset to be in the range between 0 and 1\n scaler = MinMaxScaler(feature_range=(0, 1))\n dataset = scaler.fit_transform(dataset)\n \n # Split into train and test sets\n train_size = int(len(dataset) * train_rate)\n test_size = len(dataset) - train_size\n train, test = dataset[0:train_size,:], dataset[train_size:len(dataset),:]\n \n # Use this function to prepare the train and test datasets for modeling\n look_back = 1\n trainY = train[:, pre_col_index]\n trainX = np.delete(train, pre_col_index, axis = 1) \n testY = test[:, pre_col_index]\n testX = np.delete(test, pre_col_index, axis = 1) \n \n # Reshape input to be [samples, time steps, features], here it changes the dimension from 2D to 3D\n trainX = np.reshape(trainX, (trainX.shape[0], 1, trainX.shape[1]))\n testX = np.reshape(testX, (testX.shape[0], 1, testX.shape[1]))\n \n # Create and fit the LSTM network\n RNN_model = Sequential()\n RNN_model.add(LSTM(5, input_shape=(1, len(trainX[0][0]))))\n RNN_model.add(Dense(1))\n RNN_model.compile(loss='mean_squared_error', optimizer='adam')\n RNN_model.fit(trainX, trainY, epochs=10, batch_size=128, verbose=2)\n \n # Make predictions, trainPredict should be 1D array\n trainPredict = RNN_model.predict(trainX)\n testPredict = RNN_model.predict(testX)\n \n # Change the dimension from 3D to 2D\n trainX_2D = trainX.transpose([1,0,2]).reshape(len(trainX),len(trainX[0][0]))\n testX_2D = testX.transpose([1,0,2]).reshape(len(testX),len(testX[0][0]))\n \n # Append prediction back to the model\n trainPredict_6cols = np.append(trainX_2D, trainPredict, 1)\n testPredict_6cols = np.append(testX_2D, testPredict, 1)\n\n # Invert predictions back to normal values\n trainPredict_6cols = scaler.inverse_transform(trainPredict_6cols)\n testPredict_6cols = scaler.inverse_transform(testPredict_6cols)\n \n # Calculating the RMSE\n trainScore = math.sqrt(mean_squared_error(trainY, trainPredict_6cols[:, pre_col_index]))\n print('Train Score: %.2f RMSE' % (trainScore))\n testScore = math.sqrt(mean_squared_error(testY, testPredict_6cols[:, pre_col_index]))\n print('Test Score: %.2f RMSE' % (testScore))\n \n final_prediction_train = np.where(trainPredict_6cols[:, pre_col_index] > 0, 1, 0)\n final_prediction_test = np.where(testPredict_6cols[:, pre_col_index] > 0, 1, 0)\n \n # Change dimension from 2D to 1D\n final_prediction_train = np.reshape(final_prediction_train, (-1, 1))\n final_prediction_test = np.reshape(final_prediction_test, (-1, 1))\n \n # Counting the accuracy by using basic calculation\n rnn_acc_train = sklearn.metrics.accuracy_score(np.array(final_prediction_train)[:], \n np.array(trainY)[:])\n rnn_acc_test = sklearn.metrics.accuracy_score(np.array(final_prediction_test)[:], \n np.array(testY)[:])\n return rnn_acc_train", "_____no_output_____" ], [ "# rnn_acc = rnn_pre(df_train)\n# rnn_acc", "_____no_output_____" ] ], [ [ "# Train & Test Data (Call the function)", "_____no_output_____" ], [ "#### 1. 
Random Forest", "_____no_output_____" ] ], [ [ "RF_fit, RF_pre, RF_acc, rf_model = randomForest_pre(X_train, y_train, X_test, y_test)\nprint('Random Forest accuracy: {}%'.format(RF_acc * 100))", "Random Forest accuracy: 99.902%\n" ] ], [ [ "#### 2. Gradient Boosting", "_____no_output_____" ] ], [ [ "GB_fit, GB_predict, GB_acc, gb_model = gradientBoosting_pre(X_train, y_train, X_test, y_test)\nprint('Gradient Boosting accuracy: {}%'.format(GB_acc * 100))", "Gradient Boosting accuracy: 99.634%\n" ] ], [ [ "#### 3. Logistic Regression", "_____no_output_____" ] ], [ [ "LG_fit, LG_predict, LG_acc, lg_model = logisticRegression_pre(X_train, y_train, X_test, y_test)\nprint('Logistic Regression accuracy: {}%'.format(LG_acc * 100))", "Logistic Regression accuracy: 99.824%\n" ] ], [ [ "#### 4. SVM - Support Vector Machine", "_____no_output_____" ] ], [ [ "svm_fit, svm_predict, svm_acc, svm_model = svm_pre(X_train, y_train, X_test, y_test)\nprint('SVM accuracy: {}%'.format(svm_acc * 100))", "SVM accuracy: 99.824%\n" ] ], [ [ "#### 5. ANN - Artificial Neural Network", "_____no_output_____" ] ], [ [ "ann_summary, ann_fit, ann_evaluate, ann_prediction_acc, ann_model = ann_pre(X_train, y_train, X_test, y_test)", "_________________________________________________________________\nLayer (type) Output Shape Param # \n=================================================================\ndense_1 (Dense) (None, 55) 2970 \n_________________________________________________________________\ndense_2 (Dense) (None, 2) 112 \n=================================================================\nTotal params: 3,082\nTrainable params: 3,082\nNon-trainable params: 0\n_________________________________________________________________\nTrain on 200000 samples, validate on 50000 samples\nEpoch 1/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0034 - acc: 0.9982 - val_loss: 0.0020 - val_acc: 0.9982\nEpoch 2/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0019 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 3/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0018 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 4/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0018 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 5/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0018 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 6/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0018 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 7/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 8/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 9/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 10/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 11/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 12/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 13/99\n200000/200000 [==============================] - 3s 14us/step - 
loss: 0.0017 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 14/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 15/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 16/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 17/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 18/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 19/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 20/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 21/99\n200000/200000 [==============================] - 3s 17us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 22/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 23/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 24/99\n200000/200000 [==============================] - 3s 13us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 25/99\n200000/200000 [==============================] - 3s 13us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 26/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 27/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 28/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 29/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 30/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 31/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 32/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 33/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 34/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 35/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 36/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 37/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 38/99\n200000/200000 [==============================] - 3s 15us/step - 
loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 39/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 40/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 41/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 42/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 43/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 44/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 45/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 46/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 47/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 48/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 49/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 50/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 51/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 52/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 53/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 54/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 55/99\n200000/200000 [==============================] - 3s 16us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 56/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 57/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 58/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 59/99\n200000/200000 [==============================] - 3s 16us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 60/99\n200000/200000 [==============================] - 2s 12us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 61/99\n200000/200000 [==============================] - 2s 11us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 62/99\n200000/200000 [==============================] - 3s 13us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 63/99\n200000/200000 [==============================] - 3s 13us/step - 
loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 64/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 65/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 66/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 67/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 68/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 69/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 70/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 71/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 72/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 73/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 74/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 75/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 76/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 77/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 78/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 79/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 80/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 81/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 82/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 83/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 84/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 85/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 86/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 87/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 88/99\n200000/200000 [==============================] - 3s 15us/step - 
loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 89/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 90/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 91/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 92/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 93/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 94/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 95/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 96/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 97/99\n200000/200000 [==============================] - 3s 15us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 98/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 99/99\n200000/200000 [==============================] - 3s 14us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\n50000/50000 [==============================] - 1s 16us/step\n" ], [ "print('ANN accuracy: {}%'.format(ann_prediction_acc * 100))", "ANN accuracy: 99.824%\n" ] ], [ [ "#### 6. 
MLP - Multi-layered Neural Network", "_____no_output_____" ] ], [ [ "mlp_summary, mlp_fit, mlp_evaluate, mlp_prediction_acc, mlp_model = mlp_pre(X_train, y_train, X_test, y_test)", "_________________________________________________________________\nLayer (type) Output Shape Param # \n=================================================================\ndense_3 (Dense) (None, 55) 2970 \n_________________________________________________________________\ndense_4 (Dense) (None, 55) 3080 \n_________________________________________________________________\ndense_5 (Dense) (None, 55) 3080 \n_________________________________________________________________\ndense_6 (Dense) (None, 55) 3080 \n_________________________________________________________________\ndense_7 (Dense) (None, 2) 112 \n=================================================================\nTotal params: 12,322\nTrainable params: 12,322\nNon-trainable params: 0\n_________________________________________________________________\nTrain on 200000 samples, validate on 50000 samples\nEpoch 1/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0057 - acc: 0.9941 - val_loss: 0.0019 - val_acc: 0.9982\nEpoch 2/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0018 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 3/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0018 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 4/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0018 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 5/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0018 - val_acc: 0.9982\nEpoch 6/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 7/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 8/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 9/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 10/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 11/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 12/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 13/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 14/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 15/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 16/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 17/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 18/99\n200000/200000 [==============================] - 4s 21us/step - loss: 0.0017 - acc: 
0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 19/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 20/99\n200000/200000 [==============================] - 4s 18us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 21/99\n200000/200000 [==============================] - 4s 18us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 22/99\n200000/200000 [==============================] - 4s 18us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 23/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 24/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 25/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 26/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 27/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 28/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 29/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 30/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 31/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 32/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 33/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 34/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 35/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 36/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 37/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 38/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 39/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 40/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 41/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 42/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 43/99\n200000/200000 [==============================] - 4s 21us/step - loss: 0.0017 - acc: 
0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 44/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 45/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 46/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 47/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 48/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 49/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 50/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 51/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 52/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 53/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 54/99\n200000/200000 [==============================] - 4s 21us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 55/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 56/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 57/99\n200000/200000 [==============================] - 4s 19us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 58/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 59/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 60/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 61/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 62/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 63/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 64/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 65/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 66/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 67/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 68/99\n200000/200000 [==============================] - 4s 21us/step - loss: 0.0017 - acc: 
0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 69/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 70/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 71/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 72/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 73/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 74/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 75/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 76/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 77/99\n200000/200000 [==============================] - 4s 21us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 78/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 79/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 80/99\n200000/200000 [==============================] - 4s 21us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 81/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 82/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 83/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 84/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 85/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 86/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 87/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 88/99\n200000/200000 [==============================] - 4s 21us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 89/99\n200000/200000 [==============================] - 4s 21us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 90/99\n200000/200000 [==============================] - 4s 21us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 91/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 92/99\n200000/200000 [==============================] - 4s 20us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 93/99\n200000/200000 [==============================] - 4s 22us/step - loss: 0.0017 - acc: 
0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 94/99\n200000/200000 [==============================] - 4s 21us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 95/99\n200000/200000 [==============================] - 4s 21us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 96/99\n200000/200000 [==============================] - 4s 21us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 97/99\n200000/200000 [==============================] - 4s 21us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 98/99\n200000/200000 [==============================] - 5s 24us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\nEpoch 99/99\n200000/200000 [==============================] - 4s 21us/step - loss: 0.0017 - acc: 0.9983 - val_loss: 0.0017 - val_acc: 0.9982\n50000/50000 [==============================] - 1s 23us/step\n" ], [ "print('MLP accuracy: {}%'.format(mlp_prediction_acc * 100))", "MLP accuracy: 99.824%\n" ] ], [ [ "#### 7. RNN - Recurrent Neural Network", "_____no_output_____" ] ], [ [ "rnn_acc = rnn_pre(df_train)\nprint('RNN accuracy: {}%'.format(rnn_acc * 100))", "Epoch 1/10\n - 6s - loss: 0.0014\nEpoch 2/10\n - 5s - loss: 0.0012\nEpoch 3/10\n - 5s - loss: 0.0012\nEpoch 4/10\n - 5s - loss: 0.0012\nEpoch 5/10\n - 5s - loss: 0.0012\nEpoch 6/10\n - 5s - loss: 0.0012\nEpoch 7/10\n - 5s - loss: 0.0012\nEpoch 8/10\n - 5s - loss: 0.0012\nEpoch 9/10\n - 5s - loss: 0.0012\nEpoch 10/10\n - 4s - loss: 0.0012\nTrain Score: 0.03 RMSE\nTest Score: 0.03 RMSE\nRNN accuracy: 92.27680000000001%\n" ] ], [ [ "### Conclusion", "_____no_output_____" ] ], [ [ "print('Random Forest accuracy: {}%'.format(RF_acc * 100))\nprint('Gradient Boosting accuracy: {}%'.format(GB_acc * 100))\nprint('Logistic Regression accuracy: {}%'.format(LG_acc * 100))\nprint('SVM accuracy: {}%'.format(svm_acc * 100))\nprint('ANN accuracy: {}%'.format(ann_prediction_acc * 100))\nprint('MLP accuracy: {}%'.format(mlp_prediction_acc * 100))\nprint('RNN accuracy: {}%'.format(rnn_acc * 100))", "Random Forest accuracy: 99.92%\nGradient Boosting accuracy: 99.634%\nLogistic Regression accuracy: 99.824%\nSVM accuracy: 99.824%\nANN accuracy: 99.824%\nMLP accuracy: 99.824%\nRNN accuracy: 92.27680000000001%\n" ] ], [ [ "Random Forest is the best one for our project", "_____no_output_____" ], [ "# Prediction for test.csv", "_____no_output_____" ] ], [ [ "# Columns for our current analsis\ntrain_cols", "_____no_output_____" ], [ "# Read the test data\ndf = pd.read_csv('data/test_small_all_features.csv')[train_cols].astype('float64')\ndf = np.nan_to_num(df)", "_____no_output_____" ], [ "# Read the output of the test data\nsample_out = pd.read_csv('data/sample_submission.csv', nrows=1000000)[['is_attributed']].astype('float64')", "_____no_output_____" ], [ "df_predict = rf_model.predict(df)\n# Compare the prediction with the known values\ndf_acc = sklearn.metrics.accuracy_score(np.array(df_predict)[:], \n np.array(sample_out)[:])", "_____no_output_____" ], [ "print('By using the best algorittm, the accuracy of the prediction: {}%'.format(df_acc * 100))", "By using the best algorittm, the accuracy of the prediction: 100.0%\n" ] ], [ [ "The code in the document is licensed under the MIT License: https://opensource.org/licenses/MIT\n\nAll writing in the document is licensed bt The Creative Commons Attribution 3.0 https://creativecommons.org/licenses/by/3.0/us/.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ] ]
cbfe898fa864357baf8481e939fd951c4c2d67a0
333,927
ipynb
Jupyter Notebook
principal_component_analysis_.ipynb
pk1412/ML_practice-codes
20be9dd638cd5ca9bea2ed29358491379aba48cd
[ "MIT" ]
1
2018-03-16T16:17:49.000Z
2018-03-16T16:17:49.000Z
principal_component_analysis_.ipynb
pk1412/ML_practice-codes
20be9dd638cd5ca9bea2ed29358491379aba48cd
[ "MIT" ]
null
null
null
principal_component_analysis_.ipynb
pk1412/ML_practice-codes
20be9dd638cd5ca9bea2ed29358491379aba48cd
[ "MIT" ]
null
null
null
406.732034
103,084
0.900748
[ [ [ "<a href=\"https://colab.research.google.com/github/findingfoot/ML_practice-codes/blob/master/principal_component_analysis_.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ] ], [ [ "from sklearn import datasets\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport pandas as pd\ncancer = datasets.load_breast_cancer()\n", "_____no_output_____" ], [ "print(cancer.DESCR)", ".. _breast_cancer_dataset:\n\nBreast cancer wisconsin (diagnostic) dataset\n--------------------------------------------\n\n**Data Set Characteristics:**\n\n :Number of Instances: 569\n\n :Number of Attributes: 30 numeric, predictive attributes and the class\n\n :Attribute Information:\n - radius (mean of distances from center to points on the perimeter)\n - texture (standard deviation of gray-scale values)\n - perimeter\n - area\n - smoothness (local variation in radius lengths)\n - compactness (perimeter^2 / area - 1.0)\n - concavity (severity of concave portions of the contour)\n - concave points (number of concave portions of the contour)\n - symmetry \n - fractal dimension (\"coastline approximation\" - 1)\n\n The mean, standard error, and \"worst\" or largest (mean of the three\n largest values) of these features were computed for each image,\n resulting in 30 features. For instance, field 3 is Mean Radius, field\n 13 is Radius SE, field 23 is Worst Radius.\n\n - class:\n - WDBC-Malignant\n - WDBC-Benign\n\n :Summary Statistics:\n\n ===================================== ====== ======\n Min Max\n ===================================== ====== ======\n radius (mean): 6.981 28.11\n texture (mean): 9.71 39.28\n perimeter (mean): 43.79 188.5\n area (mean): 143.5 2501.0\n smoothness (mean): 0.053 0.163\n compactness (mean): 0.019 0.345\n concavity (mean): 0.0 0.427\n concave points (mean): 0.0 0.201\n symmetry (mean): 0.106 0.304\n fractal dimension (mean): 0.05 0.097\n radius (standard error): 0.112 2.873\n texture (standard error): 0.36 4.885\n perimeter (standard error): 0.757 21.98\n area (standard error): 6.802 542.2\n smoothness (standard error): 0.002 0.031\n compactness (standard error): 0.002 0.135\n concavity (standard error): 0.0 0.396\n concave points (standard error): 0.0 0.053\n symmetry (standard error): 0.008 0.079\n fractal dimension (standard error): 0.001 0.03\n radius (worst): 7.93 36.04\n texture (worst): 12.02 49.54\n perimeter (worst): 50.41 251.2\n area (worst): 185.2 4254.0\n smoothness (worst): 0.071 0.223\n compactness (worst): 0.027 1.058\n concavity (worst): 0.0 1.252\n concave points (worst): 0.0 0.291\n symmetry (worst): 0.156 0.664\n fractal dimension (worst): 0.055 0.208\n ===================================== ====== ======\n\n :Missing Attribute Values: None\n\n :Class Distribution: 212 - Malignant, 357 - Benign\n\n :Creator: Dr. William H. Wolberg, W. Nick Street, Olvi L. Mangasarian\n\n :Donor: Nick Street\n\n :Date: November, 1995\n\nThis is a copy of UCI ML Breast Cancer Wisconsin (Diagnostic) datasets.\nhttps://goo.gl/U2Uwz2\n\nFeatures are computed from a digitized image of a fine needle\naspirate (FNA) of a breast mass. They describe\ncharacteristics of the cell nuclei present in the image.\n\nSeparating plane described above was obtained using\nMultisurface Method-Tree (MSM-T) [K. P. Bennett, \"Decision Tree\nConstruction Via Linear Programming.\" Proceedings of the 4th\nMidwest Artificial Intelligence and Cognitive Science Society,\npp. 
97-101, 1992], a classification method which uses linear\nprogramming to construct a decision tree. Relevant features\nwere selected using an exhaustive search in the space of 1-4\nfeatures and 1-3 separating planes.\n\nThe actual linear program used to obtain the separating plane\nin the 3-dimensional space is that described in:\n[K. P. Bennett and O. L. Mangasarian: \"Robust Linear\nProgramming Discrimination of Two Linearly Inseparable Sets\",\nOptimization Methods and Software 1, 1992, 23-34].\n\nThis database is also available through the UW CS ftp server:\n\nftp ftp.cs.wisc.edu\ncd math-prog/cpo-dataset/machine-learn/WDBC/\n\n.. topic:: References\n\n - W.N. Street, W.H. Wolberg and O.L. Mangasarian. Nuclear feature extraction \n for breast tumor diagnosis. IS&T/SPIE 1993 International Symposium on \n Electronic Imaging: Science and Technology, volume 1905, pages 861-870,\n San Jose, CA, 1993.\n - O.L. Mangasarian, W.N. Street and W.H. Wolberg. Breast cancer diagnosis and \n prognosis via linear programming. Operations Research, 43(4), pages 570-577, \n July-August 1995.\n - W.H. Wolberg, W.N. Street, and O.L. Mangasarian. Machine learning techniques\n to diagnose breast cancer from fine-needle aspirates. Cancer Letters 77 (1994) \n 163-171.\n" ], [ "#checking if 0 represents malignant or benign\n\n# we already know that there are 357 benign values. we count the count of data points that are classified as 1 and cross check with the information we already have\nlen(cancer.data[cancer.target == 1])\n", "_____no_output_____" ], [ "# How features affect the target\n\nfig, axes = plt.subplots(10,3, figsize = (12,9))\nmalignant = cancer.data[cancer.target == 0]\nbenign = cancer.data[cancer.target ==1]\n\nax = axes.ravel()\n\nfor i in range(30):\n _, bins = np.histogram(cancer.data[:,i], bins = 40)\n ax[i].hist(malignant[:,i], bins = bins, color = 'r', alpha = 0.5)\n ax[i].hist(benign[:,i], bins = bins, color = 'y', alpha = 0.8)\n ax[i].set_title(cancer.feature_names[i], fontsize = 8 )\n ax[i].axes.get_xaxis().set_visible(False)\n ax[i].set_yticks(())\n \nax[0].legend(['Malignant', 'Benign'], loc = \"best\")\nplt.tight_layout()\nplt.show()", "_____no_output_____" ], [ "cancer_df = pd.DataFrame(cancer.data, columns = cancer.feature_names)\ncancer_df.head()", "_____no_output_____" ], [ "plt.subplot(1,2,1)\nplt.scatter(cancer_df['worst symmetry'], cancer_df['worst texture'], s = cancer_df['worst area']*0.05,color = 'teal', label = 'check', alpha = 0.3)\nplt.xlabel('Worst Symmetry', fontsize = 12)\nplt.ylabel('Worst Texture', fontsize = 12)\nplt.subplot(1,2,2)\nplt.scatter(cancer_df['mean radius'], cancer_df['mean concave points'], s = cancer_df['mean area']*0.05,color = 'teal', label = 'check', alpha = 0.3)\nplt.xlabel('Mean Radius', fontsize = 12)\nplt.ylabel('Mean Concave', fontsize = 12)\nplt.subplot(1,2,2)", "/usr/local/lib/python3.6/dist-packages/matplotlib/figure.py:98: MatplotlibDeprecationWarning: \nAdding an axes using the same arguments as a previous axes currently reuses the earlier instance. In a future version, a new instance will always be created and returned. 
Meanwhile, this warning can be suppressed, and the future behavior ensured, by passing a unique label to each axes instance.\n \"Adding an axes using the same arguments as a previous axes \"\n" ], [ "# we need to scale the data before the fitting algorithm is implemented.\nfrom sklearn.preprocessing import StandardScaler\n\nscaler = StandardScaler()\nscaler.fit(cancer.data)\nscaled_x = scaler.transform(cancer.data)\nscaled_x.max(axis=0)", "_____no_output_____" ], [ "from sklearn.decomposition import PCA\npca = PCA(n_components = 3)\npca.fit(scaled_x)\nx_pca = pca.transform(scaled_x)", "_____no_output_____" ], [ "x_pca.shape", "_____no_output_____" ], [ "variance_test = np.var(x_pca, axis =0)\nvariance_ratio = variance_test/np.sum(variance_test)\nprint(variance_ratio)", "[0.60950217 0.2611802 0.12931763]\n" ], [ "Xax=x_pca[:,0]\nYax=x_pca[:,1]\nlabels=cancer.target\ncdict={0:'red',1:'green'}\nlabl={0:'Malignant',1:'Benign'}\nmarker={0:'*',1:'o'}\nalpha={0:.3, 1:.5}\nfig,ax=plt.subplots(figsize=(7,5))\nfig.patch.set_facecolor('white')\nfor l in np.unique(labels):\n ix=np.where(labels==l)\n \n ax.scatter(Xax[ix],Yax[ix],c=cdict[l],s=40,\n label=labl[l],marker=marker[l],alpha=alpha[l])\n# for loop ends\nplt.xlabel(\"First Principal Component\",fontsize=14)\nplt.ylabel(\"Second Principal Component\",fontsize=14)\nplt.legend()\nplt.show()", "_____no_output_____" ], [ "plt.matshow(pca.components_,cmap='viridis')\nplt.yticks([0,1,2],['1st Comp','2nd Comp','3rd Comp'],fontsize=10)\nplt.colorbar()\nplt.xticks(range(len(cancer.feature_names)),cancer.feature_names,rotation=65,ha='left')\nplt.tight_layout()\nplt.show()# ", "/usr/local/lib/python3.6/dist-packages/matplotlib/figure.py:2366: UserWarning: This figure includes Axes that are not compatible with tight_layout, so results might be incorrect.\n warnings.warn(\"This figure includes Axes that are not compatible \"\n" ], [ "feature_worst=list(cancer_df.columns[20:31]) # select the 'worst' features\nimport seaborn as sns\ns=sns.heatmap(cancer_df[feature_worst].corr(),cmap='coolwarm') \ns.set_yticklabels(s.get_yticklabels(),rotation=30,fontsize=7)\ns.set_xticklabels(s.get_xticklabels(),rotation=30,fontsize=7)\nplt.show()\n", "_____no_output_____" ], [ "", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbfe95ec2e7356efbb349fcc4752d8283a4fa255
38,882
ipynb
Jupyter Notebook
notebook/T09_C_Solutions.ipynb
ashnair1/sta-663-2019
17eb85b644c52978c2ef3a53a80b7fb031360e3d
[ "BSD-3-Clause" ]
68
2019-01-09T21:53:55.000Z
2022-02-16T17:14:22.000Z
notebook/T09_C_Solutions.ipynb
ashnair1/sta-663-2019
17eb85b644c52978c2ef3a53a80b7fb031360e3d
[ "BSD-3-Clause" ]
null
null
null
notebook/T09_C_Solutions.ipynb
ashnair1/sta-663-2019
17eb85b644c52978c2ef3a53a80b7fb031360e3d
[ "BSD-3-Clause" ]
62
2019-01-09T21:43:48.000Z
2021-11-15T04:26:25.000Z
37.350624
18,156
0.672445
[ [ [ "What you should know about C\n----\n\n- Write, compile and run a simple program in C\n- Static types\n- Control flow especially `for` loop\n- Using functions\n- Using structs\n- Pointers and arrays\n- Function pointers\n- Dynamic memory allocation\n- Separate compilation and `make`", "_____no_output_____" ], [ "### Structs", "_____no_output_____" ], [ "**Exercise 1**\n\nWrite and use a `struct` to represent dates.", "_____no_output_____" ] ], [ [ "\n\n\n\n", "_____no_output_____" ] ], [ [ "**Solution**", "_____no_output_____" ] ], [ [ "%%file ex1.c\n#include <stdio.h>\n\ntypedef struct {\n int day;\n int month;\n int year;\n} date;\n\nint main(int argc, char* argv[])\n{\n date d1;\n d1.day = 29;\n d1.month = 3;\n d1.year = 2016;\n\n date d2 = {30, 3, 2016};\n\n date d3 = {.year = 2016, .month = 3, .day = 31};\n\n printf(\"%d-%d-%d\\n\", d1.month, d1.day, d1.year);\n printf(\"%d-%d-%d\\n\", d2.month, d2.day, d2.year);\n printf(\"%d-%d-%d\\n\", d3.month, d3.day, d3.year);\n}", "_____no_output_____" ], [ "%%bash\ngcc -std=c99 -o ex1 ex1.c", "_____no_output_____" ], [ "%%bash\n./ex1", "_____no_output_____" ] ], [ [ "### Pointers", "_____no_output_____" ], [ "**Exercise 2**\n\nWrite and use pointers for working with\n\n- (a) doubles\n- (b) the date struct\n- (c) vector of doubles\n- (d) 2D array of doubles", "_____no_output_____" ] ], [ [ "\n\n\n", "_____no_output_____" ], [ "\n\n\n", "_____no_output_____" ], [ "\n\n\n", "_____no_output_____" ], [ "\n\n\n", "_____no_output_____" ] ], [ [ "**Solution**", "_____no_output_____" ] ], [ [ "%%file ex2a.c\n#include <stdio.h>\n#include <stdlib.h>\n\nint main(int argc, char* argv[])\n{\n double x1 = 2.78;\n double x2 = 3.14;\n\n double *p1 = malloc(sizeof(double));\n if (p1 == NULL) return -1;\n\n double *p2 = calloc(sizeof(double), 1);\n if (p2 == NULL) return -1;\n\n printf(\"%p: %.2f\\n\", p1, *p1);\n printf(\"%p: %.2f\\n\\n\", p2, *p2);\n\n p1 = &x1;\n *p2 = x2;\n\n printf(\"%p: %.2f\\n\", p1, *p1);\n printf(\"%p: %.2f\\n\", p2, *p2);\n\n // free(p1);\n // free(p2);\n}", "_____no_output_____" ], [ "%%bash\ngcc -std=c99 -o ex2a ex2a.c", "_____no_output_____" ], [ "%%bash\n./ex2a", "_____no_output_____" ] ], [ [ "**Solution**", "_____no_output_____" ] ], [ [ "%%file ex2b.c\n#include <stdio.h>\n#include <stdlib.h>\n\ntypedef struct {\n int day;\n int month;\n int year;\n} date;\n\nint main(int argc, char* argv[])\n{\n date *d1 = malloc(sizeof(date));\n if (d1 == NULL) return -1;\n\n d1->day = 29;\n d1->month = 3;\n d1->year = 2016;\n\n printf(\"%d-%d-%d\\n\", d1->month, d1->day, d1->year);\n printf(\"%d-%d-%d\\n\", (*d1).month, (*d1).day, (*d1).year);\n\n free(d1);\n}", "_____no_output_____" ], [ "%%bash\ngcc -std=c99 -o ex2b ex2b.c", "_____no_output_____" ], [ "%%bash\n./ex2b", "_____no_output_____" ] ], [ [ "**Solution**", "_____no_output_____" ] ], [ [ "%%file ex2c.c\n#include <stdio.h>\n#include <stdlib.h>\n\nint main(int argc, char* argv[])\n{\n int n = atoi(argv[1]);\n double *xs = calloc(sizeof(double), n);\n if (xs == NULL) return -1;\n \n for (int i=0; i<n; i++) {\n xs[i] = i*i;\n }\n\n printf(\"%.2f\\n\", *(xs));\n printf(\"%.2f\\n\", *(xs + 2));\n printf(\"%.2f\\n\", xs[0]);\n printf(\"%.2f\\n\", xs[2]);\n\n free(xs);\n}", "_____no_output_____" ], [ "%%bash\ngcc -std=c99 -o ex2c ex2c.c", "_____no_output_____" ], [ "%%bash\n./ex2c 10", "_____no_output_____" ] ], [ [ "**Solution**", "_____no_output_____" ] ], [ [ "%%file ex2d.c\n#include <stdio.h>\n#include <stdlib.h>\n\nint main(int argc, char* argv[])\n{\n int rows = 2;;\n int cols = 
3;\n double **xs = malloc(sizeof(double) * rows);\n for (int i=0; i < rows; i++) {\n xs[i] = calloc(sizeof(double), cols);\n }\n \n for (int i=0; i<rows; i++) {\n for (int j=0; j<cols; j++) {\n xs[i][j] = i+j;\n }\n }\n\n printf(\"%.2f\\n\", xs[0][0]);\n printf(\"%.2f\\n\", xs[1][2]);\n\n for (int i=0; i<rows; i++) {\n free(xs[i]);\n }\n free(xs);\n}", "_____no_output_____" ], [ "%%bash\ngcc -std=c99 -o ex2d ex2d.c", "_____no_output_____" ], [ "%%bash\n./ex2d", "_____no_output_____" ] ], [ [ "### Function pointers", "_____no_output_____" ], [ "**Exercise 3**\n\nWrite and use a function pointer.", "_____no_output_____" ], [ "**Solution**", "_____no_output_____" ] ], [ [ "%%file ex3.c\n#include <stdio.h>\n#include <stdlib.h>\n\ndouble add(double x, double y) {\n return x + y;\n}\n\n\ndouble mult(double x, double y) {\n return x * y;\n}\n\nint main(int argc, char* argv[])\n{\n double a = 3.0;\n double b = 4.0;\n\n double (*f)(double, double) = add;\n\n typedef double (*fp)(double, double);\n fp g = mult;\n\n printf(\"%.2f\\n\", add(a, b));\n printf(\"%.2f\\n\", f(a, b));\n printf(\"%.2f\\n\", g(a, b));\n\n}", "_____no_output_____" ], [ "%%bash\ngcc -std=c99 -o ex3 ex3.c", "_____no_output_____" ], [ "%%bash\n./ex3", "_____no_output_____" ] ], [ [ "### Separate compilation", "_____no_output_____" ], [ "**Exercise 4**\n\nWrite header and implementation files for the add function, and use the function in a separate driver file. Use a makefile to compile the executable.", "_____no_output_____" ] ], [ [ "\n\n\n", "_____no_output_____" ] ], [ [ "**Solution**", "_____no_output_____" ] ], [ [ "%%file ex4.h\n\n#pragma once\ndouble add(double x, double y);\n", "_____no_output_____" ], [ "%%file ex4.c\n#include \"ex4.h\"\n\ndouble add(double x, double y) {\n return x + y;\n}\n", "_____no_output_____" ], [ "%%file ex4_main.c\n#include <stdio.h>\n#include \"ex4.h\"\n\nint main() {\n double a = 3.0;\n double b = 4.0;\n\n printf(\"%.2f\\n\", add(a, b));\n}\n", "_____no_output_____" ], [ "%%file makefile\n\nex4_main: ex4_main.c ex4.o\n \t gcc -std=c99 -o ex4_main ex4_main.c ex4.o\n\nex4.o: ex4.c\n \t gcc -std=c99 -c ex4.c", "Overwriting makefile\n" ], [ "%%bash\nmake", "cc -c -o ex4.o ex4.c\ncc ex4_main.c ex4.o -o ex4_main\n" ], [ "%%bash\n./ex4_main", "7.00\n" ], [ "%%file makefile\nTARGET = ex4_main\nOBJECTS = ex4.o\nCFLAGS = -O3 -std=c99\nLDLIBS = -lm\nCC = gcc\n\nall: $(TARGET)\n \nclean:\n\t rm $(TARGET) $(OBJECTS)\n\n$(TARGET): $(OBJECTS)", "Overwriting makefile\n" ], [ "%%bash\nmake clean\nmake", "rm ex4_main ex4.o\n" ], [ "%%bash\n./ex4_main", "_____no_output_____" ] ], [ [ "What you should know about C++\n----\n\n- Anonymous functions\n- Generalized function pointers\n- Ranged for\n- Using the standard template library\n - Iterators\n - Containers\n - Algorithms\n- The `random` library\n- Using `amradillo`", "_____no_output_____" ], [ "**Exercise 5**\n\nImplement Newton's method in 1D for root finding. Pass in the function and gradient as generalized function pointers. 
Use the method to find all roots of the polynomial equation $f(x) = x^3 - 7x - 6$", "_____no_output_____" ] ], [ [ "\n\n\n", "_____no_output_____" ] ], [ [ "**Solution**", "_____no_output_____" ] ], [ [ "%%file ex5.cpp\n#include <iostream>\n#include <vector>\n#include <iomanip>\n#include <cmath>\n#include <functional> \nusing std::vector;\nusing std::cout;\nusing std::function;\n\nusing func = function<double(double)>;\n\ndouble newton(double x, func f, func fprime, int max_iter=10) {\n for (int i=0; i<max_iter; i++) {\n x -= f(x)/fprime(x);\n }\n return x;\n};\n\nint main()\n{\n auto f = [](double x) { return pow(x, 3) - 7*x - 6; };\n auto fprime = [](double x) { return 3.0*pow(x, 2) - 7; };\n \n vector<double> x = {-5, 0, 5};\n for (auto x_: x) {\n cout << std::setw(2) << x_ << \": \" \n << std::setw(3) << newton(x_, f, fprime) << \"\\n\";\n }\n}", "Overwriting ex5.cpp\n" ], [ "%%bash\ng++ -std=c++11 ex5.cpp -o ex5", "_____no_output_____" ], [ "%%bash\n./ex5", "-5: -2\n 0: -1\n 5: 3\n" ] ], [ [ "**Exercise 6**\n\nUse the armadillo library to\n\n- Generate 10 x-coordinates linearly spaced between 10 and 15\n- Generate 10 random y-values as $y = 3x^2 - 7x + 2 + \\epsilon$ where $\\epsilon \\sim 10 N(0,1)$\n- Find the length of $x$ and $y$ and the Euclidean distance between $x$ and $y$\n- Find the correlation between $x$ and $y$\n- Solve the linear system to find a quadratic fit for this data", "_____no_output_____" ] ], [ [ "\n\n\n", "_____no_output_____" ] ], [ [ "**Solution**", "_____no_output_____" ] ], [ [ "%%file ex6.cpp\n#include <iostream>\n#include <fstream>\n#include <armadillo>\nusing std::cout;\nusing std::ofstream;\nusing namespace arma;\n\nint main() \n{\n vec x = linspace<vec>(10.0,15.0,10);\n vec eps = 10*randn<vec>(10);\n vec y = 3*x%x - 7*x + 2 + eps; \n\n cout << \"x:\\n\" << x << \"\\n\";\n cout << \"y:\\n\" << y << \"\\n\";\n\n cout << \"Lenght of x is: \" << norm(x) << \"\\n\";\n cout << \"Lenght of y is: \" << norm(y) << \"\\n\";\n\n cout << \"Distance(x, y) is: \" << norm(x-y) << \"\\n\";\n\n cout << \"Correlation(x, y) is: \" << cor(x, y) << \"\\n\";\n\n mat A = join_rows(ones<vec>(10), x);\n A = join_rows(A, x%x);\n cout << \"A:\\n\" << A << \"\\n\";\n\n vec b = solve(A, y);\n cout << \"b:\\n\" << b << \"\\n\";\n\n ofstream fout1(\"x.txt\");\n x.print(fout1);\n ofstream fout2(\"y.txt\");\n y.print(fout2);\n ofstream fout3(\"b.txt\");\n b.print(fout3);\n}", "_____no_output_____" ], [ "%%bash\ng++ -std=c++11 ex6.cpp -o ex6 -larmadillo", "_____no_output_____" ], [ "%%bash\n./ex6", "x:\n 10.0000\n 10.5556\n 11.1111\n 11.6667\n 12.2222\n 12.7778\n 13.3333\n 13.8889\n 14.4444\n 15.0000\n\ny:\n 2.4802e+02\n 2.6412e+02\n 2.9157e+02\n 3.3056e+02\n 3.4089e+02\n 3.9573e+02\n 4.4029e+02\n 4.7583e+02\n 5.2827e+02\n 5.8404e+02\n\nLenght of x is: 39.8493\nLenght of y is: 1280.16\nDistance(x, y) is: 1240.76\nCorrelation(x, y) is: 0.9887\n\nA:\n 1.0000e+00 1.0000e+01 1.0000e+02\n 1.0000e+00 1.0556e+01 1.1142e+02\n 1.0000e+00 1.1111e+01 1.2346e+02\n 1.0000e+00 1.1667e+01 1.3611e+02\n 1.0000e+00 1.2222e+01 1.4938e+02\n 1.0000e+00 1.2778e+01 1.6327e+02\n 1.0000e+00 1.3333e+01 1.7778e+02\n 1.0000e+00 1.3889e+01 1.9290e+02\n 1.0000e+00 1.4444e+01 2.0864e+02\n 1.0000e+00 1.5000e+01 2.2500e+02\n\nb:\n 5.8621e+02\n -1.0156e+02\n 6.7586e+00\n\n" ], [ "x = np.loadtxt('x.txt')\ny = np.loadtxt('y.txt')\nb = np.loadtxt('b.txt')\n\nplt.scatter(x, y, s=40)\nplt.plot(x, b[0] + b[1]*x + b[2]*x**2, c='red')\npass", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ] ]
cbfeac1ec7cfecda5e87fed8d2c8b238dcb0dec7
77,068
ipynb
Jupyter Notebook
4_TrainingModel.ipynb
alex-agency/keras-movie-reviews-classification
495b938df048d6e8bd3567477e0e5d691ce0e67e
[ "MIT" ]
1
2019-08-31T17:55:42.000Z
2019-08-31T17:55:42.000Z
4_TrainingModel.ipynb
alex-agency/keras-movie-reviews-classification
495b938df048d6e8bd3567477e0e5d691ce0e67e
[ "MIT" ]
null
null
null
4_TrainingModel.ipynb
alex-agency/keras-movie-reviews-classification
495b938df048d6e8bd3567477e0e5d691ce0e67e
[ "MIT" ]
null
null
null
112.508029
28,356
0.812516
[ [ [ "## Training Network\n\nIn supervised training, the network processes inputs and compares its resulting outputs against the desired outputs. \n\nErrors are propagated back through the system, causing the system to adjust the weights which control the network. This is done using the Backpropagation algorithm, also called backprop. This process occurs over and over as the weights are continually tweaked. \n\nThe set of data which enables the training is called the \"training set.\" \n\nDuring the training of a network the same set of data is processed many times as the connection weights are ever refined. Iteratively passing batches of data through the network and updating the weights, so that the error is decreased, is known as Stochastic Gradient Descent (SGD). \n\nTraining refers to determining the best set of weights for maximizing a neural network’s accuracy. \n\nThe amount by which the weights are changed is determined by a parameter called Learning rate.\n\nNeural networks can be used without knowing precisely how training works. Most modern machine learning libraries have greatly automated the training process.", "_____no_output_____" ], [ "### NOTE:\nBasicaly this notebook prepared to use within **Google Colab**: https://colab.research.google.com/. \n\nThe Google Colabatory has **free Tesla K80 GPU** and already prepared to develop deep learning applications.\n\nFirst time opens this notebook, do not forget to enable **Python 3** runtime and **GPU** accelerator in Google Colab **Notebook Settings**. \n", "_____no_output_____" ], [ "### Setup Project\nCreate workspace and change directory.\n", "_____no_output_____" ] ], [ [ "PROJECT_HOME = '/content/keras-movie-reviews-classification'\n\nimport os.path\nif not os.path.exists(PROJECT_HOME):\n os.makedirs(PROJECT_HOME)\nos.chdir(PROJECT_HOME)\n\n!pwd", "/content/keras-movie-reviews-classification\r\n" ] ], [ [ "### Import Project\nImport GitHub project to workspace.", "_____no_output_____" ] ], [ [ "# Import project and override existing data.\n!git init .\n!git remote add -t \\* -f origin https://github.com/alex-agency/keras-movie-reviews-classification.git\n!git reset --hard origin/master\n!git checkout\n\n!ls -la input", "Initialized empty Git repository in /content/keras-movie-reviews-classification/.git/\nUpdating origin\nremote: Counting objects: 27, done.\u001b[K\nremote: Compressing objects: 100% (22/22), done.\u001b[K\nremote: Total 27 (delta 8), reused 18 (delta 3), pack-reused 0\u001b[K\nUnpacking objects: 100% (27/27), done.\nFrom https://github.com/alex-agency/keras-movie-reviews-classification\n * [new branch] master -> origin/master\nHEAD is now at 3ed38b6 step3-choosing-model-mlps\ntotal 36044\ndrwxr-xr-x 2 root root 4096 Apr 22 20:37 .\ndrwxr-xr-x 4 root root 4096 Apr 22 20:37 ..\n-rw-r--r-- 1 root root 17163254 Apr 22 20:37 dataset.npz\n-rw-r--r-- 1 root root 360416 Apr 22 20:37 mlps-model-definition.h5\n-rw-r--r-- 1 root root 19372046 Apr 22 20:37 reviews.tsv.bz2\n" ] ], [ [ "### Keras\nKeras is a high-level API, written in Python and capable of running on top of TensorFlow, Theano, or CNTK deep learning frameworks.\nKeras provides a simple and modular API to create and train Neural Networks, hiding most of the complicated details under the hood.\nBy default, Keras is configured to use Tensorflow as the backend since it is the most popular choice.\nKeras is becoming super popular recently because of its simplicity.\n### Keras workflow\n<img 
src=\"https://www.learnopencv.com/wp-content/uploads/2017/09/keras-workflow.jpg\" width=\"700px\">", "_____no_output_____" ] ], [ [ "# Load Keras libraries\nfrom keras.models import load_model\nfrom keras import callbacks", "Using TensorFlow backend.\n" ] ], [ [ "### Load model and dataset\nLoading model definition from HDF5 file.\n", "_____no_output_____" ] ], [ [ "import numpy as np\n# Load data from numpy array\nloaded = np.load('input/dataset.npz')\n\n(X_train, Y_train), (X_test, Y_test) = loaded['dataset']\n\n# Load model from HDF5 file.\nmodel = load_model('input/mlps-model-definition.h5') # model with MLP network\n\nprint(\"Model Summary\")\nprint(model.summary())", "Model Summary\n_________________________________________________________________\nLayer (type) Output Shape Param # \n=================================================================\nembedding_1 (Embedding) (None, 400, 1) 5000 \n_________________________________________________________________\nflatten_1 (Flatten) (None, 400) 0 \n_________________________________________________________________\ndense_1 (Dense) (None, 200) 80200 \n_________________________________________________________________\ndropout_1 (Dropout) (None, 200) 0 \n_________________________________________________________________\ndense_2 (Dense) (None, 2) 402 \n=================================================================\nTotal params: 85,602\nTrainable params: 85,602\nNon-trainable params: 0\n_________________________________________________________________\nNone\n" ] ], [ [ "### Configuring the training process\nOnce the model is ready, we need to configure the learning process.\n\nCompile the model means that Keras will generate a computation graph in TensorFlow.\n\n### Loss functions\nIn a supervised learning problem, we have to find the error between the actual values and the predicted value. There can be different metrics which can be used to evaluate this error. This metric is often called loss function or cost function or objective function. There can be more than one loss function depending on what you are doing with the error. In general, we use:\n\n* binary-cross-entropy for a binary classification problem\n* categorical-cross-entropy for a multi-class classification problem\n* mean-squared-error for a regression problem and so on\n\n### Optimizers\nAn Optimizer determines how the network weights are updated.\n\nKeras provides a lot of optimizers to choose from.\nRMSprop and Adam is a good choice of optimizer for most problems.\n\n### Overfitting\nOverfitting describes the situation in which your model is over-optimized to accurately predict the training set, at the expense of generalizing to unknown data (which is the objective of learning in the first place). This can happen because the model greatly twists itself to perfectly conform to the training set, even capturing its underlying noise.\n\nHow can we avoid overfitting? The simplest solution is to split our dataset into a training set and a test set. The training set is used for the optimization procedure, but we evaluate the accuracy of our model by forwarding the test set to the trained model and measuring its accuracy.\n\nDuring training, we can monitor the accuracy of the model on the training set and test set. The longer we train, the more likely our training accuracy is to go higher and higher, but at some point, it is likely the test set will stop improving. This is a cue to stop training at that point. 
We should generally expect that training accuracy is higher than test accuracy, but if it is much higher, that is a clue that we have overfit.", "_____no_output_____" ] ], [ [ "# Compile model\nmodel.compile(loss='binary_crossentropy', # cross-entropy loss function for binary classification\n optimizer='adam', # Adam optimiser one of the most popular optimization method\n metrics=['accuracy']) # print the accuracy during training\n\n# Early stopping callback\n# Stop training when a monitored quantity has stopped improving.\n# Using held-out validation set, to determine when to terminate the training process to avoid overfitting.\nearly_stopping = callbacks.EarlyStopping(monitor='val_loss', # quantity to be monitored\n min_delta=0, # minimum change in the monitored quantity to qualify as an improvement\n patience=2, # number of epochs with no improvement after which training will be stopped \n verbose=1, mode='auto')\n# Train model\nhistory = model.fit(X_train, Y_train, # train the model using the training set\n batch_size=8, # in each iteration, use size of training examples at once\n epochs=20, # iterate amount of times over the entire training set\n callbacks=[early_stopping], # called after each epoch\n validation_split=0.2, # use 20% of the data for validation\n verbose=2) # enables detailed logs, where 2 is print some information after each epoch ", "Train on 32000 samples, validate on 8000 samples\nEpoch 1/20\n - 25s - loss: 0.3563 - acc: 0.8333 - val_loss: 0.2842 - val_acc: 0.8824\nEpoch 2/20\n - 24s - loss: 0.2389 - acc: 0.9042 - val_loss: 0.2868 - val_acc: 0.8796\nEpoch 3/20\n - 24s - loss: 0.2105 - acc: 0.9167 - val_loss: 0.3067 - val_acc: 0.8801\nEpoch 00003: early stopping\n" ], [ "# Evaluate model\nscore = model.evaluate(X_test, Y_test, verbose=0) # evaluate the trained model on the test set\n\nprint('Test loss:', score[0])\nprint('Test accuracy:', score[1])", "Test loss: 0.29445332052707673\nTest accuracy: 0.88235\n" ], [ "import matplotlib.pyplot as plt\n\n# Plot the loss over each epochs.\nplt.plot(history.history['loss'], label='training')\nplt.plot(history.history['val_loss'], label='validation')\nplt.legend()\nplt.title('model loss')\nplt.ylabel('loss')\nplt.xlabel('epoch')\nplt.show()", "_____no_output_____" ], [ "# Plot the accuracy evaluated on the training set.\nplt.plot(history.history['acc'], label='training');\nplt.plot(history.history['val_acc'], label='validation');\nplt.legend()\nplt.title('model accuracy')\nplt.ylabel('accuracy')\nplt.xlabel('epoch')\nplt.show()", "_____no_output_____" ] ], [ [ "### Export trained model to file\nSaving whole Keras model into a single HDF5 file which will contain:\n* the architecture of the model, allowing to re-create the model\n* the weights of the model\n* the training configuration (loss, optimizer)\n* the state of the optimizer, allowing to resume training exactly where you left off.\n", "_____no_output_____" ] ], [ [ "# Model filename\nmodel_filename = 'mlps-model.h5'\n\n# Create output directory\noutput_dir = 'output'\nif not os.path.exists(output_dir):\n os.makedirs(output_dir)\n \nmodel_file = os.path.join(output_dir, model_filename)\n\n# Export model into HDF5 file.\nmodel.save(model_file)\n\n!ls -la output", "total 1040\r\ndrwxr-xr-x 2 root root 4096 Apr 22 20:39 .\r\ndrwxr-xr-x 5 root root 4096 Apr 22 20:39 ..\r\n-rw-r--r-- 1 root root 1055200 Apr 22 20:39 mlps-model.h5\r\n" ] ], [ [ "### Downloading file to your local file system\n\nIt will invoke a browser download of the file to your local computer.", 
"_____no_output_____" ] ], [ [ "from google.colab import files\n# Download file\nfiles.download(model_file)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
cbfeb42b2689c54e645fc855297affca415290ba
120,181
ipynb
Jupyter Notebook
src/deeplearning_with_python/chapter03/00_imdb_classification.ipynb
adrianogil/GANS
657517322e8352db2e04ad95557a4b7246ff6cbc
[ "MIT" ]
null
null
null
src/deeplearning_with_python/chapter03/00_imdb_classification.ipynb
adrianogil/GANS
657517322e8352db2e04ad95557a4b7246ff6cbc
[ "MIT" ]
null
null
null
src/deeplearning_with_python/chapter03/00_imdb_classification.ipynb
adrianogil/GANS
657517322e8352db2e04ad95557a4b7246ff6cbc
[ "MIT" ]
null
null
null
302.722922
35,488
0.719723
[ [ [ "# Chapter 3 - a binary classification example", "_____no_output_____" ] ], [ [ "from keras.datasets import imdb\nfrom keras import models, layers\nfrom keras import optimizers\nfrom keras import losses\nfrom keras import metrics\nimport numpy as np\n\nimport matplotlib.pyplot as plt", "_____no_output_____" ] ], [ [ "## Loading dataset", "_____no_output_____" ] ], [ [ "# Suggested code - doesn't work\n# (train_data, train_labels), (test_data, test_labels) = imdb.load_data(num_words=10000)", "_____no_output_____" ] ], [ [ "### Workaround to load dataset:\nhttps://stackoverflow.com/a/56243777\n", "_____no_output_____" ] ], [ [ "\n# save np.load\nnp_load_old = np.load\n\n# modify the default parameters of np.load\nnp.load = lambda *a,**k: np_load_old(*a, allow_pickle=True, **k)\n\n# call load_data with allow_pickle implicitly set to true\n(train_data, train_labels), (test_data, test_labels) = imdb.load_data(num_words=10000)\n\n# restore np.load for future normal usage\nnp.load = np_load_old", "_____no_output_____" ] ], [ [ "### Example of decoding a review back to English", "_____no_output_____" ] ], [ [ "word_index = imdb.get_word_index()\nreverse_word_index = dict(\n [(value, key) for (key, value) in word_index.items()] \n)\ndecoded_review = ' '.join([reverse_word_index.get(i - 3, '?') for i in train_data[0]])", "_____no_output_____" ], [ "decoded_review", "_____no_output_____" ] ], [ [ "## Preparing data", "_____no_output_____" ] ], [ [ "def vectorize_sequences(sequences, dimension=10000):\n results = np.zeros((len(sequences), dimension))\n for i, sequence in enumerate(sequences):\n results[i, sequence] = 1\n return results\n\nx_train = vectorize_sequences(train_data)\nx_test = vectorize_sequences(test_data)", "_____no_output_____" ], [ "y_train = np.asarray(train_labels).astype('float32')\ny_test = np.asarray(test_labels).astype('float32')\n", "_____no_output_____" ] ], [ [ "## Model definition", "_____no_output_____" ] ], [ [ "model = models.Sequential()\nmodel.add(layers.Dense(16, activation='relu', input_shape=(10000,)))\nmodel.add(layers.Dense(16, activation='relu'))\nmodel.add(layers.Dense(1, activation='sigmoid'))\n\nmodel.compile(\n optimizer=optimizers.RMSprop(lr=0.001),\n loss=losses.binary_crossentropy,\n metrics=[metrics.binary_accuracy]\n)", "WARNING: Logging before flag parsing goes to stderr.\nW0430 12:37:19.291604 4535760320 deprecation_wrapper.py:119] From /Users/adriano.gil/.virtualenvs/pydeep/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:74: The name tf.get_default_graph is deprecated. Please use tf.compat.v1.get_default_graph instead.\n\nW0430 12:37:19.436863 4535760320 deprecation_wrapper.py:119] From /Users/adriano.gil/.virtualenvs/pydeep/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:517: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n\nW0430 12:37:19.460834 4535760320 deprecation_wrapper.py:119] From /Users/adriano.gil/.virtualenvs/pydeep/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:4138: The name tf.random_uniform is deprecated. Please use tf.random.uniform instead.\n\nW0430 12:37:19.550854 4535760320 deprecation_wrapper.py:119] From /Users/adriano.gil/.virtualenvs/pydeep/lib/python3.6/site-packages/keras/optimizers.py:790: The name tf.train.Optimizer is deprecated. 
Please use tf.compat.v1.train.Optimizer instead.\n\nW0430 12:37:19.557961 4535760320 deprecation_wrapper.py:119] From /Users/adriano.gil/.virtualenvs/pydeep/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:3376: The name tf.log is deprecated. Please use tf.math.log instead.\n\nW0430 12:37:19.565245 4535760320 deprecation.py:323] From /Users/adriano.gil/.virtualenvs/pydeep/lib/python3.6/site-packages/tensorflow/python/ops/nn_impl.py:180: add_dispatch_support.<locals>.wrapper (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\nInstructions for updating:\nUse tf.where in 2.0, which has the same broadcast rule as np.where\n" ] ], [ [ "## Creating a validation set", "_____no_output_____" ] ], [ [ "x_val = x_train[:10000]\npartial_x_train = x_train[10000:]\n\ny_val = y_train[:10000]\npartial_y_train = y_train[10000:]", "_____no_output_____" ] ], [ [ "## Training model", "_____no_output_____" ] ], [ [ "history = model.fit(\n partial_x_train,\n partial_y_train,\n epochs=20,\n batch_size=512,\n validation_data=(x_val, y_val)\n)", "W0430 12:40:11.840742 4535760320 deprecation_wrapper.py:119] From /Users/adriano.gil/.virtualenvs/pydeep/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:986: The name tf.assign_add is deprecated. Please use tf.compat.v1.assign_add instead.\n\nTrain on 15000 samples, validate on 10000 samples\nEpoch 1/20\n15000/15000 [==============================] - 5s 307us/step - loss: 0.5096 - binary_accuracy: 0.7808 - val_loss: 0.3827 - val_binary_accuracy: 0.8670\nEpoch 2/20\n15000/15000 [==============================] - 3s 177us/step - loss: 0.3010 - binary_accuracy: 0.9043 - val_loss: 0.2993 - val_binary_accuracy: 0.8905\nEpoch 3/20\n15000/15000 [==============================] - 2s 146us/step - loss: 0.2158 - binary_accuracy: 0.9290 - val_loss: 0.3080 - val_binary_accuracy: 0.8726\nEpoch 4/20\n15000/15000 [==============================] - 2s 144us/step - loss: 0.1723 - binary_accuracy: 0.9439 - val_loss: 0.2836 - val_binary_accuracy: 0.8847\nEpoch 5/20\n15000/15000 [==============================] - 2s 146us/step - loss: 0.1388 - binary_accuracy: 0.9555 - val_loss: 0.2859 - val_binary_accuracy: 0.8863\nEpoch 6/20\n15000/15000 [==============================] - 2s 147us/step - loss: 0.1112 - binary_accuracy: 0.9663 - val_loss: 0.3093 - val_binary_accuracy: 0.8809\nEpoch 7/20\n15000/15000 [==============================] - 2s 149us/step - loss: 0.0930 - binary_accuracy: 0.9727 - val_loss: 0.3167 - val_binary_accuracy: 0.8843\nEpoch 8/20\n15000/15000 [==============================] - 2s 152us/step - loss: 0.0765 - binary_accuracy: 0.9777 - val_loss: 0.3855 - val_binary_accuracy: 0.8666\nEpoch 9/20\n15000/15000 [==============================] - 2s 149us/step - loss: 0.0616 - binary_accuracy: 0.9832 - val_loss: 0.3720 - val_binary_accuracy: 0.8761\nEpoch 10/20\n15000/15000 [==============================] - 2s 141us/step - loss: 0.0506 - binary_accuracy: 0.9867 - val_loss: 0.3921 - val_binary_accuracy: 0.8791\nEpoch 11/20\n15000/15000 [==============================] - 2s 146us/step - loss: 0.0405 - binary_accuracy: 0.9903 - val_loss: 0.4249 - val_binary_accuracy: 0.8772\nEpoch 12/20\n15000/15000 [==============================] - 2s 146us/step - loss: 0.0342 - binary_accuracy: 0.9925 - val_loss: 0.4597 - val_binary_accuracy: 0.8700\nEpoch 13/20\n15000/15000 [==============================] - 2s 150us/step - loss: 0.0260 - binary_accuracy: 0.9945 - val_loss: 0.4827 - val_binary_accuracy: 0.8737\nEpoch 
14/20\n15000/15000 [==============================] - 2s 148us/step - loss: 0.0202 - binary_accuracy: 0.9964 - val_loss: 0.5139 - val_binary_accuracy: 0.8716\nEpoch 15/20\n15000/15000 [==============================] - 2s 150us/step - loss: 0.0154 - binary_accuracy: 0.9979 - val_loss: 0.5438 - val_binary_accuracy: 0.8698\nEpoch 16/20\n15000/15000 [==============================] - 2s 144us/step - loss: 0.0167 - binary_accuracy: 0.9963 - val_loss: 0.5753 - val_binary_accuracy: 0.8692\nEpoch 17/20\n15000/15000 [==============================] - 2s 152us/step - loss: 0.0074 - binary_accuracy: 0.9997 - val_loss: 0.6190 - val_binary_accuracy: 0.8648\nEpoch 18/20\n15000/15000 [==============================] - 2s 148us/step - loss: 0.0097 - binary_accuracy: 0.9983 - val_loss: 0.6423 - val_binary_accuracy: 0.8657\nEpoch 19/20\n15000/15000 [==============================] - 2s 136us/step - loss: 0.0043 - binary_accuracy: 0.9999 - val_loss: 0.6813 - val_binary_accuracy: 0.8649\nEpoch 20/20\n15000/15000 [==============================] - 2s 128us/step - loss: 0.0066 - binary_accuracy: 0.9991 - val_loss: 0.7073 - val_binary_accuracy: 0.8659\n" ] ], [ [ "## Plotting the training and validation loss", "_____no_output_____" ] ], [ [ "history_dict = history.history\nloss_values = history_dict['loss']\nval_loss_values = history_dict['val_loss']\nepochs = range(1, len(loss_values) + 1)\n\nplt.plot(epochs, loss_values, 'bo', label='Training loss')\nplt.plot(epochs, val_loss_values, 'b', label='Validation loss')\nplt.title('Training and validation loss')\nplt.xlabel('Epochs')\nplt.ylabel('Loss')\nplt.legend()\n\nplt.show()", "_____no_output_____" ] ], [ [ "## Plotting the training and validation accuracy", "_____no_output_____" ] ], [ [ "plt.clf()\n\nacc = history_dict['binary_accuracy']\nval_acc = history_dict['val_binary_accuracy']\nplt.plot(epochs, acc, 'bo', label='Training acc')\nplt.plot(epochs, val_acc, 'b', label='Validation acc')\nplt.title('Training and validation accuracy')\nplt.xlabel('Epochs')\nplt.ylabel('Acc')\nplt.legend()\nplt.show()", "_____no_output_____" ] ], [ [ "## Retraining model from scratch", "_____no_output_____" ] ], [ [ "odel = models.Sequential()\nmodel.add(layers.Dense(16, activation='relu', input_shape=(10000,)))\nmodel.add(layers.Dense(16, activation='relu'))\nmodel.add(layers.Dense(1, activation='sigmoid'))\n\nmodel.compile(\n optimizer='rmsprop',\n loss=losses.binary_crossentropy,\n metrics=[metrics.binary_accuracy]\n)\nmodel.fit(x_train, y_train, epochs=4, batch_size=512)\nresults = model.evaluate(x_test, y_test)", "Epoch 1/4\n25000/25000 [==============================] - 3s 107us/step - loss: 0.6321 - binary_accuracy: 0.5000\nEpoch 2/4\n25000/25000 [==============================] - 2s 91us/step - loss: 0.5317 - binary_accuracy: 0.9140\nEpoch 3/4\n25000/25000 [==============================] - 2s 89us/step - loss: 0.4189 - binary_accuracy: 0.9772\nEpoch 4/4\n25000/25000 [==============================] - 2s 88us/step - loss: 0.3093 - binary_accuracy: 0.9790\n25000/25000 [==============================] - 1s 47us/step\n" ], [ "results", "_____no_output_____" ], [ "model.predict(x_test)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ] ]
cbfeb73a34909404fe1607c7a41b4c397db3213f
53,197
ipynb
Jupyter Notebook
course_docs/numericalHW/ContourPlot.ipynb
lancejnelson/PH220
30d0b1748979a1aa407a7118bd9edf0632394c08
[ "MIT", "BSD-3-Clause" ]
1
2020-12-05T04:48:18.000Z
2020-12-05T04:48:18.000Z
course_docs/numericalHW/ContourPlot.ipynb
lancejnelson/PH220
30d0b1748979a1aa407a7118bd9edf0632394c08
[ "MIT", "BSD-3-Clause" ]
null
null
null
course_docs/numericalHW/ContourPlot.ipynb
lancejnelson/PH220
30d0b1748979a1aa407a7118bd9edf0632394c08
[ "MIT", "BSD-3-Clause" ]
null
null
null
470.769912
49,876
0.943098
[ [ [ "# Overview\n\nSo far, the numerical problems have taught you how to:\n\n1. Plot one-dimensional function. (simple plots)\n2. Plot vector fields. (So you can visualize the electric field)\n3. 3D plots, or functions of two variables. (So you can visualize voltage functions.)\n\nThis week you will learn how to generate contour plots. Together with 3D plots, contour plots should help you get a good sense of the shape of electric potential functions.\n\n\n## A simple example.\n\nLet's see if we can generate a contour plot for the electric potential due to just a single point charge, located at $\\vec{r} = (1,1)$ m. \n\n", "_____no_output_____" ] ], [ [ "from numpy import arange,meshgrid,sqrt,array\nfrom numpy.linalg import norm\nfrom matplotlib import pyplot\nfrom mpl_toolkits.mplot3d import Axes3D\n\nx = arange(-2,2,.01)\ny = arange(-2,2,.01)\nX,Y = meshgrid(x,y)\n\nk = 8.99e9\nq = 100e-9\n\nrCharge = array([1,1])\n\nV = k * q/sqrt((X-rCharge[0])**2 + (Y-rCharge[1])**2)\nfig,ax = pyplot.subplots(figsize = (10,10))\ncontourLocations = [100,250,500,1000,2000,5000]\nCP = ax.contour(X,Y,V,contourLocations) \nax.clabel(CP, inline=True, fontsize=10)\npyplot.show()", "_____no_output_____" ] ], [ [ "## For you to do\n\n1. Look over the code above and see if you can determine what each line does. Use print statements to see exactly what is happening if you need to. Then add comments next to each line.\n2. Create a contour plot of the three-point-charge example situation from last week. Look at both plots (the one from last week and the one you just generated) and convince yourself that the contour plot is indeed communicating the same information as the 3D plot from last week.\n3. Use your code from the 12-point charge problem from last week and modify it to create a contour plot of that situation. Compare to the 3D plot you generated last week and verify that it is communicating the same information.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ] ]
cbfebb1b3c82dfff3b302c224944b1e7a1fe1970
4,273
ipynb
Jupyter Notebook
shading.ipynb
UttamBasu/ipygany_examples
5b619951977a0e89f2c63a172d55a637519231d8
[ "MIT" ]
null
null
null
shading.ipynb
UttamBasu/ipygany_examples
5b619951977a0e89f2c63a172d55a637519231d8
[ "MIT" ]
null
null
null
shading.ipynb
UttamBasu/ipygany_examples
5b619951977a0e89f2c63a172d55a637519231d8
[ "MIT" ]
null
null
null
29.673611
413
0.552539
[ [ [ "%matplotlib inline\nfrom pyvista import set_plot_theme\nset_plot_theme('document')", "_____no_output_____" ] ], [ [ "Types of Shading {#shading_example}\n================\n\nComparison of default, flat shading vs. smooth shading.\n", "_____no_output_____" ] ], [ [ "# sphinx_gallery_thumbnail_number = 4\nimport pyvista\nfrom pyvista import examples", "_____no_output_____" ] ], [ [ "PyVista supports two types of shading: flat and smooth shading that uses\nVTK\\'s Phong shading algorithm.\n\nThis is a plot with the default flat shading.\n", "_____no_output_____" ] ], [ [ "mesh = examples.load_nut()\nmesh.plot()", "_____no_output_____" ] ], [ [ "Here\\'s the same sphere with smooth shading.\n", "_____no_output_____" ] ], [ [ "mesh.plot(smooth_shading=True)", "_____no_output_____" ] ], [ [ "Note how smooth shading makes edges that should be sharp look odd, it\\'s\nbecause the points of these normals are averaged between two faces that\nhave a sharp angle between them. You can avoid this by enabling\n`split_sharp_edges`.\n\n::: {.note}\n::: {.admonition-title}\nNote\n:::\n\nYou can configure the splitting angle with the optional `feature_angle`\nkeyword argument.\n:::\n", "_____no_output_____" ] ], [ [ "mesh.plot(smooth_shading=True, split_sharp_edges=True)", "_____no_output_____" ] ], [ [ "We can even plot the edges that will be split using\n`extract_feature_edges <pyvista.PolyDataFilters.extract_feature_edges>`{.interpreted-text\nrole=\"func\"}.\n", "_____no_output_____" ] ], [ [ "# extract the feature edges exceeding 30 degrees\nedges = mesh.extract_feature_edges(\n boundary_edges=False, non_manifold_edges=False,\n feature_angle=30, manifold_edges=False\n)\n\n# plot both the edges and the smoothed mesh\npl = pyvista.Plotter()\npl.enable_anti_aliasing()\npl.add_mesh(mesh, smooth_shading=True, split_sharp_edges=True)\npl.add_mesh(edges, color='k', line_width=5)\npl.show()", "_____no_output_____" ] ], [ [ "The `split_sharp_edges` keyword argument is compatible with physically\nbased rendering as well.\n", "_____no_output_____" ] ], [ [ "# plot both the edges and the smoothed mesh\npl = pyvista.Plotter()\npl.enable_anti_aliasing()\npl.add_mesh(mesh, color='w', split_sharp_edges=True, pbr=True,\n metallic=1.0, roughness=0.5)\npl.show()", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
cbfebdbc140ae80ee0de344872cfe85df13d6086
61,001
ipynb
Jupyter Notebook
nli_01_task_and_data.ipynb
SeungYeon-Chung/cs224u
c03e590cebabcc881d7d250be3760a3216fb9191
[ "Apache-2.0" ]
null
null
null
nli_01_task_and_data.ipynb
SeungYeon-Chung/cs224u
c03e590cebabcc881d7d250be3760a3216fb9191
[ "Apache-2.0" ]
null
null
null
nli_01_task_and_data.ipynb
SeungYeon-Chung/cs224u
c03e590cebabcc881d7d250be3760a3216fb9191
[ "Apache-2.0" ]
null
null
null
63.213472
14,374
0.759774
[ [ [ "# Natural language inference: task and datasets", "_____no_output_____" ] ], [ [ "__author__ = \"Christopher Potts\"\n__version__ = \"CS224u, Stanford, Fall 2020\"", "_____no_output_____" ] ], [ [ "## Contents\n\n1. [Overview](#Overview)\n1. [Our version of the task](#Our-version-of-the-task)\n1. [Primary resources](#Primary-resources)\n1. [Set-up](#Set-up)\n1. [SNLI](#SNLI)\n 1. [SNLI properties](#SNLI-properties)\n 1. [Working with SNLI](#Working-with-SNLI)\n1. [MultiNLI](#MultiNLI)\n 1. [MultiNLI properties](#MultiNLI-properties)\n 1. [Working with MultiNLI](#Working-with-MultiNLI)\n 1. [Annotated MultiNLI subsets](#Annotated-MultiNLI-subsets)\n1. [Adversarial NLI](#Adversarial-NLI)\n 1. [Adversarial NLI properties](#Adversarial-NLI-properties)\n 1. [Working with Adversarial NLI](#Working-with-Adversarial-NLI)\n1. [Other NLI datasets](#Other-NLI-datasets)", "_____no_output_____" ], [ "## Overview\n\nNatural Language Inference (NLI) is the task of predicting the logical relationships between words, phrases, sentences, (paragraphs, documents, ...). Such relationships are crucial for all kinds of reasoning in natural language: arguing, debating, problem solving, summarization, and so forth.\n\n[Dagan et al. (2006)](https://u.cs.biu.ac.il/~nlp/RTE1/Proceedings/dagan_et_al.pdf), one of the foundational papers on NLI (also called Recognizing Textual Entailment; RTE), make a case for the generality of this task in NLU:\n\n> It seems that major inferences, as needed by multiple applications, can indeed be cast in terms of textual entailment. For example, __a QA system__ has to identify texts that entail a hypothesized answer. [...] Similarly, for certain __Information Retrieval__ queries the combination of semantic concepts and relations denoted by the query should be entailed from relevant retrieved documents. [...] In __multi-document summarization__ a redundant sentence, to be omitted from the summary, should be entailed from other sentences in the summary. And in __MT evaluation__ a correct translation should be semantically equivalent to the gold standard translation, and thus both translations should entail each other. Consequently, we hypothesize that textual entailment recognition is a suitable generic task for evaluating and comparing applied semantic inference models. Eventually, such efforts can promote the development of entailment recognition \"engines\" which may provide useful generic modules across applications.", "_____no_output_____" ], [ "## Our version of the task\n\nOur NLI data will look like this:\n\n| Premise | Relation | Hypothesis |\n|:--------|:---------------:|:------------|\n| turtle | contradiction | linguist |\n| A turtled danced | entails | A turtle moved |\n| Every reptile danced | entails | Every turtle moved |\n| Some turtles walk | contradicts | No turtles move |\n| James Byron Dean refused to move without blue jeans | entails | James Dean didn't dance without pants |\n\nIn the [word-entailment bakeoff](hw_wordentail.ipynb), we study a special case of this where the premise and hypothesis are single words. 
This notebook begins to introduce the problem of NLI more fully.", "_____no_output_____" ], [ "## Primary resources\n\nWe're going to focus on three NLI corpora:\n\n* [The Stanford Natural Language Inference corpus (SNLI)](https://nlp.stanford.edu/projects/snli/)\n* [The Multi-Genre NLI Corpus (MultiNLI)](https://www.nyu.edu/projects/bowman/multinli/)\n* [The Adversarial NLI Corpus (ANLI)](https://github.com/facebookresearch/anli)\n\nThe first was collected by a group at Stanford, led by [Sam Bowman](https://www.nyu.edu/projects/bowman/), and the second was collected by a group at NYU, also led by [Sam Bowman](https://www.nyu.edu/projects/bowman/). Both have the same format and were crowdsourced using the same basic methods. However, SNLI is entirely focused on image captions, whereas MultiNLI includes a greater range of contexts.\n\nThe third corpus was collected by a group at Facebook AI and UNC Chapel Hill. The team's goal was to address the fact that datasets like SNLI and MultiNLI seem to be artificially easy – models trained on them can often surpass stated human performance levels but still fail on examples that are simple and intuitive for people. The dataset is \"Adversarial\" because the annotators were asked to try to construct examples that fooled strong models but still passed muster with other human readers.\n\nThis notebook presents tools for working with these corpora. The [second notebook in the unit](nli_02_models.ipynb) concerns models of NLI.", "_____no_output_____" ], [ "## Set-up\n\n* As usual, you need to be fully set up to work with [the CS224u repository](https://github.com/cgpotts/cs224u/).\n\n* If you haven't already, download [the course data](http://web.stanford.edu/class/cs224u/data/data.tgz), unpack it, and place it in the directory containing the course repository – the same directory as this notebook. (If you want to put it somewhere else, change `DATA_HOME` below.)", "_____no_output_____" ] ], [ [ "import nli\nimport os\nimport pandas as pd\nimport random", "_____no_output_____" ], [ "DATA_HOME = os.path.join(\"data\", \"nlidata\")\n\nSNLI_HOME = os.path.join(DATA_HOME, \"snli_1.0\")\n\nMULTINLI_HOME = os.path.join(DATA_HOME, \"multinli_1.0\")\n\nANNOTATIONS_HOME = os.path.join(DATA_HOME, \"multinli_1.0_annotations\")\n\nANLI_HOME = os.path.join(DATA_HOME, \"anli_v1.0\")", "_____no_output_____" ] ], [ [ "## SNLI", "_____no_output_____" ], [ "### SNLI properties", "_____no_output_____" ], [ "For SNLI (and MultiNLI), MTurk annotators were presented with premise sentences and asked to produce new sentences that entailed, contradicted, or were neutral with respect to the premise. A subset of the examples were then validated by an additional four MTurk annotators.", "_____no_output_____" ], [ "* All the premises are captions from the [Flickr30K corpus](http://shannon.cs.illinois.edu/DenotationGraph/).\n\n\n* Some of the sentences rather depressingly reflect stereotypes ([Rudinger et al. 2017](https://aclanthology.coli.uni-saarland.de/papers/W17-1609/w17-1609)).\n\n\n* 550,152 train examples; 10K dev; 10K test\n\n\n* Mean length in tokens:\n * Premise: 14.1\n * Hypothesis: 8.3\n\n* Clause-types\n * Premise S-rooted: 74%\n * Hypothesis S-rooted: 88.9%\n\n\n* Vocab size: 37,026\n\n\n* 56,951 examples validated by four additional annotators\n * 58.3% examples with unanimous gold label\n * 91.2% of gold labels match the author's label\n * 0.70 overall Fleiss kappa\n\n\n* Top scores currently around 90%. 
", "_____no_output_____" ], [ "### Working with SNLI", "_____no_output_____" ], [ "The following readers should make it easy to work with SNLI:\n \n* `nli.SNLITrainReader`\n* `nli.SNLIDevReader`\n\nWriting a `Test` reader is easy and so left to the user who decides that a test-set evaluation is appropriate. We omit that code as a subtle way of discouraging use of the test set during project development.\n\nThe base class, `nli.NLIReader`, is used by all the readers discussed here.\n\nBecause the datasets are so large, it is often useful to be able to randomly sample from them. All of the reader classes discussed here support this with their keyword argument `samp_percentage`. For example, the following samples approximately 10% of the examples from the SNLI training set:", "_____no_output_____" ] ], [ [ "nli.SNLITrainReader(SNLI_HOME, samp_percentage=0.10, random_state=42)", "_____no_output_____" ] ], [ [ "The precise number of examples will vary somewhat because of the way the sampling is done. (Here, we choose efficiency over precision in the number of cases we return; see the implementation for details.)", "_____no_output_____" ], [ "All of the readers have a `read` method that yields `NLIExample` example instances. For SNLI, these have the following attributes:\n\n* __annotator_labels__: `list of str`\n* __captionID__: `str`\n* __gold_label__: `str`\n* __pairID__: `str`\n* __sentence1__: `str`\n* __sentence1_binary_parse__: `nltk.tree.Tree`\n* __sentence1_parse__: `nltk.tree.Tree`\n* __sentence2__: `str`\n* __sentence2_binary_parse__: `nltk.tree.Tree`\n* __sentence2_parse__: `nltk.tree.Tree`", "_____no_output_____" ], [ "The following creates the label distribution for the training data:", "_____no_output_____" ] ], [ [ "snli_labels = pd.Series(\n [ex.gold_label for ex in nli.SNLITrainReader(\n SNLI_HOME, filter_unlabeled=False).read()])\n\nsnli_labels.value_counts()", "_____no_output_____" ] ], [ [ "Use `filter_unlabeled=True` (the default) to silently drop the examples for which `gold_label` is `-`.", "_____no_output_____" ], [ "Let's look at a specific example in some detail:", "_____no_output_____" ] ], [ [ "snli_iterator = iter(nli.SNLITrainReader(SNLI_HOME).read())", "_____no_output_____" ], [ "snli_ex = next(snli_iterator)", "_____no_output_____" ], [ "print(snli_ex)", "\"NLIExample({'annotator_labels': ['neutral'], 'captionID': '3416050480.jpg#4', 'gold_label': 'neutral', 'pairID': '3416050480.jpg#4r1n', 'sentence1': 'A person on a horse jumps over a broken down airplane.', 'sentence1_binary_parse': Tree('X', [Tree('X', [Tree('X', ['A', 'person']), Tree('X', ['on', Tree('X', ['a', 'horse'])])]), Tree('X', [Tree('X', ['jumps', Tree('X', ['over', Tree('X', ['a', Tree('X', ['broken', Tree('X', ['down', 'airplane'])])])])]), '.'])]), 'sentence1_parse': Tree('ROOT', [Tree('S', [Tree('NP', [Tree('NP', [Tree('DT', ['A']), Tree('NN', ['person'])]), Tree('PP', [Tree('IN', ['on']), Tree('NP', [Tree('DT', ['a']), Tree('NN', ['horse'])])])]), Tree('VP', [Tree('VBZ', ['jumps']), Tree('PP', [Tree('IN', ['over']), Tree('NP', [Tree('DT', ['a']), Tree('JJ', ['broken']), Tree('JJ', ['down']), Tree('NN', ['airplane'])])])]), Tree('.', ['.'])])]), 'sentence2': 'A person is training his horse for a competition.', 'sentence2_binary_parse': Tree('X', [Tree('X', ['A', 'person']), Tree('X', [Tree('X', ['is', Tree('X', [Tree('X', ['training', Tree('X', ['his', 'horse'])]), Tree('X', ['for', Tree('X', ['a', 'competition'])])])]), '.'])]), 'sentence2_parse': Tree('ROOT', [Tree('S', [Tree('NP', [Tree('DT', 
['A']), Tree('NN', ['person'])]), Tree('VP', [Tree('VBZ', ['is']), Tree('VP', [Tree('VBG', ['training']), Tree('NP', [Tree('PRP$', ['his']), Tree('NN', ['horse'])]), Tree('PP', [Tree('IN', ['for']), Tree('NP', [Tree('DT', ['a']), Tree('NN', ['competition'])])])])]), Tree('.', ['.'])])])})\n" ] ], [ [ "As you can see from the above attribute list, there are __three versions__ of the premise and hypothesis sentences:\n\n1. Regular string representations of the data\n1. Unlabeled binary parses \n1. Labeled parses", "_____no_output_____" ] ], [ [ "snli_ex.sentence1", "_____no_output_____" ] ], [ [ "The binary parses lack node labels; so that we can use `nltk.tree.Tree` with them, the label `X` is added to all of them:", "_____no_output_____" ] ], [ [ "snli_ex.sentence1_binary_parse", "_____no_output_____" ] ], [ [ "Here's the full parse tree with syntactic categories:", "_____no_output_____" ] ], [ [ "snli_ex.sentence1_parse", "_____no_output_____" ] ], [ [ "The leaves of either tree are tokenized versions of them:", "_____no_output_____" ] ], [ [ "snli_ex.sentence1_parse.leaves()", "_____no_output_____" ] ], [ [ "## MultiNLI", "_____no_output_____" ], [ "### MultiNLI properties\n\n\n* Train premises drawn from five genres: \n 1. Fiction: works from 1912–2010 spanning many genres\n 1. Government: reports, letters, speeches, etc., from government websites\n 1. The _Slate_ website\n 1. Telephone: the Switchboard corpus\n 1. Travel: Berlitz travel guides\n\n\n* Additional genres just for dev and test (the __mismatched__ condition): \n 1. The 9/11 report\n 1. Face-to-face: The Charlotte Narrative and Conversation Collection\n 1. Fundraising letters\n 1. Non-fiction from Oxford University Press\n 1. _Verbatim_ articles about linguistics\n\n\n* 392,702 train examples; 20K dev; 20K test\n\n\n* 19,647 examples validated by four additional annotators\n * 58.2% examples with unanimous gold label\n * 92.6% of gold labels match the author's label\n\n\n* Test-set labels available as a Kaggle competition. \n\n * Top matched scores currently around 0.81.\n * Top mismatched scores currently around 0.83.", "_____no_output_____" ], [ "### Working with MultiNLI", "_____no_output_____" ], [ "For MultiNLI, we have the following readers: \n\n* `nli.MultiNLITrainReader`\n* `nli.MultiNLIMatchedDevReader`\n* `nli.MultiNLIMismatchedDevReader`\n\nThe MultiNLI test sets are available on Kaggle ([matched version](https://www.kaggle.com/c/multinli-matched-open-evaluation) and [mismatched version](https://www.kaggle.com/c/multinli-mismatched-open-evaluation)).", "_____no_output_____" ], [ "The interface to these is the same as for the SNLI readers:", "_____no_output_____" ] ], [ [ "nli.MultiNLITrainReader(MULTINLI_HOME, samp_percentage=0.10, random_state=42)", "_____no_output_____" ] ], [ [ "The `NLIExample` instances for MultiNLI have the same attributes as those for SNLI. 
Here is the list repeated from above for convenience:\n\n* __annotator_labels__: `list of str`\n* __captionID__: `str`\n* __gold_label__: `str`\n* __pairID__: `str`\n* __sentence1__: `str`\n* __sentence1_binary_parse__: `nltk.tree.Tree`\n* __sentence1_parse__: `nltk.tree.Tree`\n* __sentence2__: `str`\n* __sentence2_binary_parse__: `nltk.tree.Tree`\n* __sentence2_parse__: `nltk.tree.Tree`", "_____no_output_____" ], [ "The full label distribution:", "_____no_output_____" ] ], [ [ "multinli_labels = pd.Series(\n [ex.gold_label for ex in nli.MultiNLITrainReader(\n MULTINLI_HOME, filter_unlabeled=False).read()])\n\nmultinli_labels.value_counts()", "_____no_output_____" ] ], [ [ "No examples in the MultiNLI train set lack a gold label, so the value of the `filter_unlabeled` parameter has no effect here, but it does have an effect in the `Dev` versions.", "_____no_output_____" ], [ "### Annotated MultiNLI subsets\n\nMultiNLI includes additional annotations for a subset of the dev examples. The goal is to help people understand how well their models are doing on crucial NLI-related linguistic phenomena.", "_____no_output_____" ] ], [ [ "matched_ann_filename = os.path.join(\n ANNOTATIONS_HOME,\n \"multinli_1.0_matched_annotations.txt\")\n\nmismatched_ann_filename = os.path.join(\n ANNOTATIONS_HOME,\n \"multinli_1.0_mismatched_annotations.txt\")", "_____no_output_____" ], [ "def view_random_example(annotations, random_state=42):\n random.seed(random_state)\n ann_ex = random.choice(list(annotations.items()))\n pairid, ann_ex = ann_ex\n ex = ann_ex['example']\n print(\"pairID: {}\".format(pairid))\n print(ann_ex['annotations'])\n print(ex.sentence1)\n print(ex.gold_label)\n print(ex.sentence2)", "_____no_output_____" ], [ "matched_ann = nli.read_annotated_subset(matched_ann_filename, MULTINLI_HOME)", "_____no_output_____" ], [ "view_random_example(matched_ann)", "pairID: 63218c\n[]\nRecently, however, I have settled down and become decidedly less experimental.\ncontradiction\nI am still as experimental as ever, and I am always on the move.\n" ] ], [ [ "## Adversarial NLI", "_____no_output_____" ], [ "### Adversarial NLI properties\n\nThe ANLI dataset was created in response to evidence that datasets like SNLI and MultiNLI are artificially easy for modern machine learning models to solve. The team sought to tackle this weakness head-on, by designing a crowdsourcing task in which annotators were explicitly trying to confuse state-of-the-art models. In broad outline, the task worked like this:\n\n1. The crowdworker is presented with a premise (context) text and asked to construct a hypothesis sentence that entails, contradicts, or is neutral with respect to that premise. (The actual wording is more informal, along the lines of the SNLI/MultiNLI task).\n\n1. The crowdworker submits a hypothesis text.\n\n1. The premise/hypothesis pair is fed to a trained model that makes a prediction about the correct NLI label.\n\n1. If the model's prediction is correct, then the crowdworker loops back to step 2 to try again. 
If the model's prediction is incorrect, then the example is validated by different crowdworkers.\n\nThe dataset consists of three rounds, each involving a different model and a different set of sources for the premise texts:\n\n| Round | Model | Training data | Context sources | \n|:------:|:------------|:---------------------------|:-----------------|\n| 1 | [BERT-large](https://www.aclweb.org/anthology/N19-1423/) | SNLI + MultiNLI | Wikipedia |\n| 2 | [ROBERTa](https://arxiv.org/abs/1907.11692) | SNLI + MultiNLI + [NLI-FEVER](https://github.com/easonnie/combine-FEVER-NSMN/blob/master/other_resources/nli_fever.md) + Round 1 | Wikipedia |\n| 3 | [ROBERTa](https://arxiv.org/abs/1907.11692) | SNLI + MultiNLI + [NLI-FEVER](https://github.com/easonnie/combine-FEVER-NSMN/blob/master/other_resources/nli_fever.md) + Round 2 | Various |\n\nEach round has train/dev/test splits. The sizes of these splits and their label distributions are calculated just below.\n\nThe [project README](https://github.com/facebookresearch/anli/blob/master/README.md) seeks to establish some rules for how the rounds can be used for training and evaluation.", "_____no_output_____" ], [ "### Working with Adversarial NLI", "_____no_output_____" ], [ "For ANLI, we have the following readers: \n\n* `nli.ANLITrainReader`\n* `nli.ANLIDevReader`\n\nAs with SNLI, we leave the writing of a `Test` version to the user, as a way of discouraging inadvertent use of the test set during project development.", "_____no_output_____" ], [ "Because ANLI is distributed in three rounds, and the rounds can be used independently or pooled, the interface has a `rounds` argument. The default is `rounds=(1,2,3)`, but any subset of them can be specified. Here are some illustrations using the `Train` reader; the `Dev` interface is the same:", "_____no_output_____" ] ], [ [ "for rounds in ((1,), (2,), (3,), (1,2,3)):\n count = len(list(nli.ANLITrainReader(ANLI_HOME, rounds=rounds).read()))\n print(\"R{0:}: {1:,}\".format(rounds, count))", "R(1,): 16,946\nR(2,): 45,460\nR(3,): 100,459\nR(1, 2, 3): 162,865\n" ] ], [ [ "The above figures correspond to those in Table 2 of the paper. I am not sure what accounts for the differences of 100 examples in round 2 (and, in turn, in the grand total).", "_____no_output_____" ], [ "ANLI uses a different set of attributes from SNLI/MultiNLI. 
Here is a summary of what `NLIExample` instances offer for this corpus:\n\n* __uid__: a unique identifier; akin to `pairID` in SNLI/MultiNLI \n* __context__: the premise; corresponds to `sentence1` in SNLI/MultiNLI\n* __hypothesis__: the hypothesis; corresponds to `sentence2` in SNLI/MultiNLI\n* __label__: the gold label; corresponds to `gold_label` in SNLI/MultiNLI\n* __model_label__: the label predicted by the model used in the current round\n* __reason__: a crowdworker's free-text hypothesis about why the model made an incorrect prediction for the current __context__/__hypothesis__ pair\n* __emturk__: for dev (and test), this is `True` if the annotator contributed only dev (test) exmples, else `False`; in turn, it is `False` for all train examples.\n* __genre__: the source for the __context__ text\n* __tag__: information about the round and train/dev/test classification\n\nAll these attribute are `str`-valued except for `emturk`, which is `bool`-valued.", "_____no_output_____" ], [ "The labels in this dataset are conceptually the same as for `SNLI/MultiNLI`, but they are encoded differently:", "_____no_output_____" ] ], [ [ "anli_labels = pd.Series(\n [ex.label for ex in nli.ANLITrainReader(ANLI_HOME).read()])\n\nanli_labels.value_counts()", "_____no_output_____" ] ], [ [ "For the dev set, the `label` and `model_label` values are always different, suggesting that these evaluations will be very challenging for present-day models:", "_____no_output_____" ] ], [ [ "pd.Series(\n [ex.label == ex.model_label for ex in nli.ANLIDevReader(ANLI_HOME).read()]\n).value_counts()", "_____no_output_____" ] ], [ [ "In the train set, they do sometimes correspond, and you can track the changes in the rate of correct model predictions across the rounds:", "_____no_output_____" ] ], [ [ "for r in (1,2,3):\n dist = pd.Series(\n [ex.label == ex.model_label\n for ex in nli.ANLITrainReader(ANLI_HOME, rounds=(r,)).read()]\n ).value_counts()\n dist = dist / dist.sum()\n dist.name = \"Round {}\".format(r)\n print(dist, end=\"\\n\\n\")", "True 0.821197\nFalse 0.178803\nName: Round 1, dtype: float64\n\nTrue 0.932028\nFalse 0.067972\nName: Round 2, dtype: float64\n\nTrue 0.915916\nFalse 0.084084\nName: Round 3, dtype: float64\n\n" ] ], [ [ "This corresponds to Table 2, \"Model error rate (Verified)\", in the paper. 
(I am not sure what accounts for the slight differences in the percentages.)", "_____no_output_____" ], [ "## Other NLI datasets", "_____no_output_____" ], [ "* [The FraCaS textual inference test suite](http://www-nlp.stanford.edu/~wcmac/downloads/) is a smaller, hand-built dataset that is great for evaluating a model's ability to handle complex logical patterns.\n\n* [SemEval 2013](https://www.cs.york.ac.uk/semeval-2013/) had a wide range of interesting data sets for NLI and related tasks.\n\n* [The SemEval 2014 semantic relatedness shared task](http://alt.qcri.org/semeval2014/task1/) used an NLI dataset called [Sentences Involving Compositional Knowledge (SICK)](http://alt.qcri.org/semeval2014/task1/index.php?id=data-and-tools).\n\n* [MedNLI](https://physionet.org/physiotools/mimic-code/mednli/) is specialized to the medical domain, using data derived from [MIMIC III](https://mimic.physionet.org).\n\n* [XNLI](https://github.com/facebookresearch/XNLI) is a multilingual collection of test sets derived from MultiNLI.\n\n* [Diverse Natural Language Inference Collection (DNC)](http://decomp.io/projects/diverse-natural-language-inference/) transforms existing annotations from other tasks into NLI problems for a diverse range of reasoning challenges.\n\n* [SciTail](http://data.allenai.org/scitail/) is an NLI dataset derived from multiple-choice science exam questions and Web text.\n\n* [NLI Style FEVER](https://github.com/easonnie/combine-FEVER-NSMN/blob/master/other_resources/nli_fever.md) is a version of [the FEVER dataset](http://fever.ai) put into a standard NLI format. It was used by the Adversarial NLI team to train models for their annotation round 2.\n\n* Models for NLI might be adapted for use with [the 30M Factoid Question-Answer Corpus](http://agarciaduran.org/).\n\n* Models for NLI might be adapted for use with [the Penn Paraphrase Database](http://paraphrase.org/).", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ] ]
cbfed1b29ad60eaec1267b42ac22ece0e7607c52
759,219
ipynb
Jupyter Notebook
src/python/plot_utils/Profiling.ipynb
isaac-ped/demikernel
6f372569e3599d8bf9083df6c25490c42af74c0d
[ "MIT" ]
null
null
null
src/python/plot_utils/Profiling.ipynb
isaac-ped/demikernel
6f372569e3599d8bf9083df6c25490c42af74c0d
[ "MIT" ]
null
null
null
src/python/plot_utils/Profiling.ipynb
isaac-ped/demikernel
6f372569e3599d8bf9083df6c25490c42af74c0d
[ "MIT" ]
null
null
null
146.87928
125,134
0.817697
[ [ [ "import glob\nimport os\nimport pandas as pd\nimport numpy as np\n\n##################### Traces description\n# 1. CLT_PUSH_START - SENDING Time between the scheduling of the request and its actual processing\n# 2. CLT_PUSH_END - CLT_PUSH_START Time to prepare the packet, send it to the NIC driver through rte_eth_tx_burst(), and free the dpdk mbuf\n# 3. SRV_POP_START - CLT_PUSH_END Time on the wire: item detected in the io queue's receive queue - client packet sent /!\\ I think this can be negative if the server schedule's pop way before the client sends requests\n# 4. SRV_POP_END - SRV_POP_START Time to parse incoming packet + \"waiting time\" at the server's queue\n# 5. NET_RECEIVE - SRV_POP_END Time between message delivered to the application by dmtr_wait_any() and packet processed by the I/O queue\n# 6. HTTP_DISPATCH - NET_RECEIVE Time taken to select the HTTP recipient (either RR, or apply the filter, etc)\n# 7. START_HTTP - HTTP_DISPATCH Time spent in memory queue between network component and HTTP\n# 8. END_HTTP - START_HTTP Time spent performing HTTP processing\n# 9. HTTP_DONE - END_HTTP Time spent in memory queue between HTTP component and HTPP /!\\ This include the \"wait time\" of dmtr_wait_any, as the same poll operates on both network sockets, and this memory queue\n# 10. SRV_PUSH_START - HTTP_DONE Time between the scheduling of the response and its actual processing\n# 11. SRV_PUSH_END - SRV_PUSH_START Time spent preparing the packet and sending it to the wire (identical to #2)\n# 12. CLT_POP_START - SRV_PUSH_END Time spent on the wire /!\\ I think this can be negative as the client schedules the read as soon as it as sent the request\n# 13. CLT_POP_END - CLT_POP_START Time spent processing an incoming network packet (includes wait time) (identical to #4)\n# 14. 
COMPLETED - CLT_POP_END Time ellapsed between the reponse being delivered to the client by dmtr_wait_any(), and the response's being fully processed by the I/O queue\n\nTRACE_ORDER = [\n 'SENDING',\n 'CLT_PUSH_START',\n# 'CLT_PUSH_END',\n# 'SRV_POP_START',\n 'SRV_POP_END',\n 'NET_RECEIVE',\n 'HTTP_DISPATCH',\n 'START_HTTP',\n 'END_HTTP',\n 'HTTP_DONE',\n 'SRV_PUSH_START',\n# 'SRV_PUSH_END',\n# 'CLT_POP_START',\n 'CLT_POP_END',\n 'COMPLETED'\n]\n\ndef read_tokens(trace_dir, exclude_first = 5):\n #REQ_ID\tSENDING\tREADING\tCOMPLETED\tPUSH_TOKEN\tPOP_TOKEN)\n files = glob.glob(os.path.join(trace_dir, '*traces*'))\n files = list(filter(lambda x: not ('POP' in x or 'PUSH' in x), files))\n if len(files) > 1:\n raise Exception(\"Too many files\")\n df = pd.read_csv(files[0], sep='\\t')\n \n min_time = df[df.columns[1]].min()\n df = df[df[df.columns[1]] > min_time + exclude_first * 1e9]\n return df\n\ndef read_traces(trace_dir, label):\n files = glob.glob(os.path.join(trace_dir, '*%s-traces' % label))\n if len(files) > 1:\n raise Exception(\"Too many files\")\n df = pd.read_csv(files[0], sep='\\t')\n return df\n\ndef merge_trace(token_df, trace_df, token_label, col_label):\n trace_df = trace_df[['%s_TOKEN' % token_label, 'TIME']]\n df = pd.merge(token_df, trace_df, on='%s_TOKEN' % token_label)\n# return df\n return df.rename(columns={'TIME': col_label})\n\ndef merge_traces(token_df, trace_df, token_label, col_label):\n start_df = trace_df[trace_df.START]\n stop_df = trace_df[~trace_df.START]\n\n df = merge_trace(token_df, start_df, token_label, '%s_%s_START' % (col_label, token_label))\n df = merge_trace(df, stop_df, token_label, '%s_%s_END' % (col_label, token_label))\n return df\n\ncol_labels = dict(client='CLT', server='SRV')\ntoken_labels = dict(client='rate_client', server='')\n\ndef order_cols(df, subtract_root = True):\n col_order = list(filter(lambda x: x in df.columns, TRACE_ORDER, ))\n df = df[['REQ_ID'] +col_order].set_index('REQ_ID')\n \n if subtract_root:\n df[col_order] = df[col_order].apply(lambda x: x - df[col_order[0]])\n return df\n\ndef read_profiling_node(base_dir, experiment, node_label):\n client_dir = os.path.join(base_dir, experiment, node_label)\n token_df = read_tokens(client_dir)\n push_df = read_traces(client_dir, 'PUSH')\n pop_df = read_traces(client_dir, 'POP')\n \n df = merge_traces(token_df, push_df, 'PUSH', col_labels[node_label])\n df = merge_traces(df, pop_df, 'POP', col_labels[node_label])\n \n return order_cols(df)\n \nCLIENT_RCV = 'CLT_POP_END'\nCLIENT_SND = 'CLT_PUSH_START'\n \ndef read_merged_profiling(base_dir, experiment):\n client_df = read_profiling_node(base_dir, experiment, 'client')\n server_df = read_profiling_node(base_dir, experiment, 'server')\n \n server_cols = server_df.columns\n client_cols = client_df.columns\n \n df = client_df.join(server_df)\n \n offset = df[CLIENT_SND]\n \n df[server_cols] = df[server_cols].apply(lambda x: x + offset)\n \n offset =( df[CLIENT_RCV] - df[server_cols[-1]]) / 2\n \n df[server_cols] = df[server_cols].apply(lambda x: x + offset)\n \n \n return order_cols(df.reset_index())", "_____no_output_____" ], [ "COLORS = [\"#700f00\",\n\"#013fb0\",\n\"#cbcd11\",\n\"#6b3a7d\",\n\"#ff392e\",\n\"#008eb2\",\n\"#ff8da5\",\n\"#000000\",\n\"#458f00\",\n\"#AAAAAA\",\n\"#123456\",\n\"#7192F1\",\n\"#013fb0\",\n'#777777',\n'#BBBBBB'\n]\n\ndef stacked_plot(df, full_sep=False):\n columns = df.columns\n print(columns)\n bottom = 0\n cols_so_far = []\n for prev_col, next_col, color in zip(columns, columns[1:], COLORS):\n if not 
full_sep:\n bottom = df[prev_col]\n plt.bar(df.index, df[next_col] - df[prev_col], 1, bottom=bottom, color=color, label=prev_col)\n if full_sep:\n bottom = (bottom + df[next_col]- df[prev_col]).max()\n \n", "_____no_output_____" ], [ "def plot_stacked_sample(df, sample_size=100, full_sep=False):\n df = df.sort_values(df.columns[-1])\n\n lowest = df.iloc[:sample_size]\n highest = df.iloc[-sample_size:]\n middlest = df.iloc[int(len(df) / 2 - sample_size / 2): int(len(df) / 2 + sample_size / 2)]\n\n plt.figure(figsize=(9.5, 4))\n ax1 = plt.subplot(131)\n stacked_plot(lowest.reset_index(drop=True), full_sep)\n ax2 = plt.subplot(132, sharey=ax1)\n stacked_plot(middlest.reset_index(drop=True), full_sep)\n plt.subplot(133, sharey=ax2)\n stacked_plot(highest.reset_index(drop=True), full_sep)\n plt.tight_layout()\n plt.subplots_adjust(top=.8)\n plt.sca(ax2)\n\n plt.legend(loc='lower center', bbox_to_anchor=(.5, 1), ncol=5)", "_____no_output_____" ], [ "df = read_merged_profiling('profiling', 'all_pinned_8')\nplot_stacked_sample(df, 200)", "_____no_output_____" ], [ "df = read_merged_profiling('profiling', 'all_pinned_14')\nplot_stacked_sample(df, 200)", "_____no_output_____" ], [ "df = read_merged_profiling('profiling', 'all_pinned_file_only')\nplot_stacked_sample(df, 200)", "_____no_output_____" ], [ "import numpy as np\n\ndef plot_correlations(df):\n columns = df.columns\n for prev_col, next_col, color in zip(columns, columns[1:], COLORS):\n diffs = df[next_col] - df[prev_col]\n \n x = diffs / diffs.max()\n y = df.COMPLETED - df[columns[0]]\n plt.plot(x, y, '.', markersize=.1, color=color, label=prev_col)\n mask = ~x.isna() & ~y.isna()\n p = np.polyfit(x[mask], y[mask], 1)\n \n plt.plot(x, p[0] + p[1]*x, '-', color=color)", "_____no_output_____" ], [ "df = read_merged_profiling('profiling', 'all_pinned_file_only')\ndf = df.sort_values('COMPLETED')\n# df = df.iloc[-5000:]\ndiffs = df.astype(float).diff(-1, axis=1)\ndiffs = diffs[diffs.columns[~diffs.isna().all().values]]\ndiffs['TOTAL'] = df['COMPLETED']\nplt.figure()\ncorr = diffs.corr()\ncorr[corr == 1] = 0\nplt.imshow(corr)\nplt.xticks(range(len(corr.columns)))\nplt.yticks(range(len(corr.columns)))\nplt.xlim([-.5, 10.5])\nplt.ylim([-.5, 10.5])\nplt.gca().set_xticklabels(corr.columns, rotation=-45, ha='left')\nplt.gca().set_yticklabels(corr.columns)\nplt.colorbar()\nplt.tight_layout()\n# plt.subplots_adjust(left=.3, top=.8)", "_____no_output_____" ], [ "df = read_merged_profiling('profiling', 'all_pinned_file_only')\ndf = df.sort_values('COMPLETED')\n# df = df.iloc[-5000:]\ndiffs = df.astype(float).diff(-1, axis=1)\ndiffs = diffs[diffs.columns[~diffs.isna().all().values]]\ndiffs['TOTAL'] = df['COMPLETED']\nplt.figure()\ncorr = diffs.cov()\n# corr[corr > .98] = 0\nplt.imshow(corr)\nplt.xticks(range(len(corr.columns)))\nplt.yticks(range(len(corr.columns)))\nplt.xlim([-.5, 10.5])\nplt.ylim([-.5, 10.5])\nplt.gca().set_xticklabels(corr.columns, rotation=-45, ha='left')\nplt.gca().set_yticklabels(corr.columns)\nplt.colorbar()\nplt.tight_layout()\n# plt.subplots_adjust(left=.3, top=.8)", "_____no_output_____" ], [ "df = read_merged_profiling('profiling', 'all_pinned_regex_only')\nplot_stacked_sample(df, 200)", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbfed867255aef83b8b049a2ff31390bd34ae47e
139,206
ipynb
Jupyter Notebook
.ipynb_checkpoints/P1-checkpoint.ipynb
haowu74/find_lane_line
2dc1ed40cfb5e0c62060b36fa88267b24caefdd9
[ "MIT" ]
null
null
null
.ipynb_checkpoints/P1-checkpoint.ipynb
haowu74/find_lane_line
2dc1ed40cfb5e0c62060b36fa88267b24caefdd9
[ "MIT" ]
null
null
null
.ipynb_checkpoints/P1-checkpoint.ipynb
haowu74/find_lane_line
2dc1ed40cfb5e0c62060b36fa88267b24caefdd9
[ "MIT" ]
null
null
null
240.42487
116,880
0.910011
[ [ [ "# Self-Driving Car Engineer Nanodegree\n\n\n## Project: **Finding Lane Lines on the Road** \n***\nIn this project, you will use the tools you learned about in the lesson to identify lane lines on the road. You can develop your pipeline on a series of individual images, and later apply the result to a video stream (really just a series of images). Check out the video clip \"raw-lines-example.mp4\" (also contained in this repository) to see what the output should look like after using the helper functions below. \n\nOnce you have a result that looks roughly like \"raw-lines-example.mp4\", you'll need to get creative and try to average and/or extrapolate the line segments you've detected to map out the full extent of the lane lines. You can see an example of the result you're going for in the video \"P1_example.mp4\". Ultimately, you would like to draw just one line for the left side of the lane, and one for the right.\n\nIn addition to implementing code, there is a brief writeup to complete. The writeup should be completed in a separate file, which can be either a markdown file or a pdf document. There is a [write up template](https://github.com/udacity/CarND-LaneLines-P1/blob/master/writeup_template.md) that can be used to guide the writing process. Completing both the code in the Ipython notebook and the writeup template will cover all of the [rubric points](https://review.udacity.com/#!/rubrics/322/view) for this project.\n\n---\nLet's have a look at our first image called 'test_images/solidWhiteRight.jpg'. Run the 2 cells below (hit Shift-Enter or the \"play\" button above) to display the image.\n\n**Note: If, at any point, you encounter frozen display windows or other confounding issues, you can always start again with a clean slate by going to the \"Kernel\" menu above and selecting \"Restart & Clear Output\".**\n\n---", "_____no_output_____" ], [ "**The tools you have are color selection, region of interest selection, grayscaling, Gaussian smoothing, Canny Edge Detection and Hough Tranform line detection. You are also free to explore and try other techniques that were not presented in the lesson. Your goal is piece together a pipeline to detect the line segments in the image, then average/extrapolate them and draw them onto the image for display (as below). Once you have a working pipeline, try it out on the video stream below.**\n\n---\n\n<figure>\n <img src=\"examples/line-segments-example.jpg\" width=\"380\" alt=\"Combined Image\" />\n <figcaption>\n <p></p> \n <p style=\"text-align: center;\"> Your output should look something like this (above) after detecting line segments using the helper functions below </p> \n </figcaption>\n</figure>\n <p></p> \n<figure>\n <img src=\"examples/laneLines_thirdPass.jpg\" width=\"380\" alt=\"Combined Image\" />\n <figcaption>\n <p></p> \n <p style=\"text-align: center;\"> Your goal is to connect/average/extrapolate line segments to get output like this</p> \n </figcaption>\n</figure>", "_____no_output_____" ], [ "**Run the cell below to import some packages. If you get an `import error` for a package you've already installed, try changing your kernel (select the Kernel menu above --> Change Kernel). Still have problems? Try relaunching Jupyter Notebook from the terminal prompt. 
Also, consult the forums for more troubleshooting tips.** ", "_____no_output_____" ], [ "## Import Packages", "_____no_output_____" ] ], [ [ "#importing some useful packages\nimport matplotlib.pyplot as plt\nimport matplotlib.image as mpimg\nimport numpy as np\nimport cv2\n%matplotlib inline", "_____no_output_____" ] ], [ [ "## Read in an Image", "_____no_output_____" ] ], [ [ "#reading in an image\nimage = mpimg.imread('test_images/solidWhiteRight.jpg')\n\n#printing out some stats and plotting\nprint('This image is:', type(image), 'with dimensions:', image.shape)\nplt.imshow(image) # if you wanted to show a single color channel image called 'gray', for example, call as plt.imshow(gray, cmap='gray')", "This image is: <class 'numpy.ndarray'> with dimensions: (540, 960, 3)\n" ] ], [ [ "## Ideas for Lane Detection Pipeline", "_____no_output_____" ], [ "**Some OpenCV functions (beyond those introduced in the lesson) that might be useful for this project are:**\n\n`cv2.inRange()` for color selection \n`cv2.fillPoly()` for regions selection \n`cv2.line()` to draw lines on an image given endpoints \n`cv2.addWeighted()` to coadd / overlay two images\n`cv2.cvtColor()` to grayscale or change color\n`cv2.imwrite()` to output images to file \n`cv2.bitwise_and()` to apply a mask to an image\n\n**Check out the OpenCV documentation to learn about these and discover even more awesome functionality!**", "_____no_output_____" ], [ "## Helper Functions", "_____no_output_____" ], [ "Below are some helper functions to help get you started. They should look familiar from the lesson!", "_____no_output_____" ] ], [ [ "import math\n\ndef grayscale(img):\n \"\"\"Applies the Grayscale transform\n This will return an image with only one color channel\n but NOTE: to see the returned image as grayscale\n (assuming your grayscaled image is called 'gray')\n you should call plt.imshow(gray, cmap='gray')\"\"\"\n return cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)\n # Or use BGR2GRAY if you read an image with cv2.imread()\n # return cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n \ndef canny(img, low_threshold, high_threshold):\n \"\"\"Applies the Canny transform\"\"\"\n return cv2.Canny(img, low_threshold, high_threshold)\n\ndef gaussian_blur(img, kernel_size):\n \"\"\"Applies a Gaussian Noise kernel\"\"\"\n return cv2.GaussianBlur(img, (kernel_size, kernel_size), 0)\n\ndef region_of_interest(img, vertices):\n \"\"\"\n Applies an image mask.\n \n Only keeps the region of the image defined by the polygon\n formed from `vertices`. The rest of the image is set to black.\n \"\"\"\n #defining a blank mask to start with\n mask = np.zeros_like(img) \n \n #defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n \n #filling pixels inside the polygon defined by \"vertices\" with the fill color \n cv2.fillPoly(mask, vertices, ignore_mask_color)\n \n #returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image\n\n\ndef draw_lines(img, lines, color=[255, 0, 0], thickness=2):\n \"\"\"\n NOTE: this is the function you might want to use as a starting point once you want to \n average/extrapolate the line segments you detect to map out the full\n extent of the lane (going from the result shown in raw-lines-example.mp4\n to that shown in P1_example.mp4). 
\n \n Think about things like separating line segments by their \n slope ((y2-y1)/(x2-x1)) to decide which segments are part of the left\n line vs. the right line. Then, you can average the position of each of \n the lines and extrapolate to the top and bottom of the lane.\n \n This function draws `lines` with `color` and `thickness`. \n Lines are drawn on the image inplace (mutates the image).\n If you want to make the lines semi-transparent, think about combining\n this function with the weighted_img() function below\n \"\"\"\n for line in lines:\n for x1,y1,x2,y2 in line:\n cv2.line(img, (x1, y1), (x2, y2), color, thickness)\n\ndef hough_lines(img, rho, theta, threshold, min_line_len, max_line_gap):\n \"\"\"\n `img` should be the output of a Canny transform.\n \n Returns an image with hough lines drawn.\n \"\"\"\n lines = cv2.HoughLinesP(img, rho, theta, threshold, np.array([]), minLineLength=min_line_len, maxLineGap=max_line_gap)\n line_img = np.zeros((img.shape[0], img.shape[1], 3), dtype=np.uint8)\n draw_lines(line_img, lines)\n return line_img\n\n# Python 3 has support for cool math symbols.\n\ndef weighted_img(img, initial_img, α=0.8, β=1., γ=0.):\n \"\"\"\n `img` is the output of the hough_lines(), An image with lines drawn on it.\n Should be a blank image (all black) with lines drawn on it.\n \n `initial_img` should be the image before any processing.\n \n The result image is computed as follows:\n \n initial_img * α + img * β + γ\n NOTE: initial_img and img must be the same shape!\n \"\"\"\n return cv2.addWeighted(initial_img, α, img, β, γ)", "_____no_output_____" ] ], [ [ "## Test Images\n\nBuild your pipeline to work on the images in the directory \"test_images\" \n**You should make sure your pipeline works well on these images before you try the videos.**", "_____no_output_____" ] ], [ [ "import os\nos.listdir(\"test_images/\")", "_____no_output_____" ] ], [ [ "## Build a Lane Finding Pipeline\n\n", "_____no_output_____" ], [ "Build the pipeline and run your solution on all test_images. 
Make copies into the `test_images_output` directory, and you can use the images in your writeup report.\n\nTry tuning the various parameters, especially the low and high Canny thresholds as well as the Hough lines parameters.", "_____no_output_____" ] ], [ [ "# TODO: Build your pipeline that will draw lane lines on the test_images\n# then save them to the test_images_output directory.\nimport numpy\nkernel_size = 3\nlow_threshold = 150\nhigh_threshold = 250\nrho = 1 # distance resolution in pixels of the Hough grid\ntheta = np.pi * 90 /180 # angular resolution in radians of the Hough grid\nthreshold = 15 # minimum number of votes (intersections in Hough grid cell)\nmin_line_len = 10 #minimum number of pixels making up a line\nmax_line_gap = 100 # maximum gap in pixels between connectable line segments\nfor file in os.listdir(\"test_images/\"):\n imgfile = \"test_images/\" + file\n init_image = mpimg.imread(imgfile)\n ysize = image.shape[0]\n xsize = image.shape[1]\n triangle = numpy.array([[0, ysize-1], [xsize-1, ysize-1], [xsize/2, ysize/2]], numpy.int32)\n image = grayscale(init_image)\n image = gaussian_blur(image, kernel_size)\n image = canny(image, low_threshold, high_threshold)\n image = region_of_interest(image, [triangle])\n image = hough_lines(image, rho, theta, threshold, min_line_len, max_line_gap)\n image = weighted_img(image, init_image)\n mpimg.imsave(\"test_images_result/\" + file, image)", "_____no_output_____" ] ], [ [ "## Test on Videos\n\nYou know what's cooler than drawing lanes over images? Drawing lanes over video!\n\nWe can test our solution on two provided videos:\n\n`solidWhiteRight.mp4`\n\n`solidYellowLeft.mp4`\n\n**Note: if you get an import error when you run the next cell, try changing your kernel (select the Kernel menu above --> Change Kernel). Still have problems? Try relaunching Jupyter Notebook from the terminal prompt. Also, consult the forums for more troubleshooting tips.**\n\n**If you get an error that looks like this:**\n```\nNeedDownloadError: Need ffmpeg exe. 
\nYou can download it by calling: \nimageio.plugins.ffmpeg.download()\n```\n**Follow the instructions in the error message and check out [this forum post](https://discussions.udacity.com/t/project-error-of-test-on-videos/274082) for more troubleshooting tips across operating systems.**", "_____no_output_____" ] ], [ [ "# Import everything needed to edit/save/watch video clips\nfrom moviepy.editor import VideoFileClip\nfrom IPython.display import HTML", "_____no_output_____" ], [ "def process_image(image):\n # NOTE: The output you return should be a color image (3 channel) for processing video below\n # TODO: put your pipeline here,\n # you should return the final output (image where lines are drawn on lanes)\n\n return result", "_____no_output_____" ] ], [ [ "Let's try the one with the solid white lane on the right first ...", "_____no_output_____" ] ], [ [ "white_output = 'test_videos_output/solidWhiteRight.mp4'\n## To speed up the testing process you may want to try your pipeline on a shorter subclip of the video\n## To do so add .subclip(start_second,end_second) to the end of the line below\n## Where start_second and end_second are integer values representing the start and end of the subclip\n## You may also uncomment the following line for a subclip of the first 5 seconds\n##clip1 = VideoFileClip(\"test_videos/solidWhiteRight.mp4\").subclip(0,5)\nclip1 = VideoFileClip(\"test_videos/solidWhiteRight.mp4\")\nwhite_clip = clip1.fl_image(process_image) #NOTE: this function expects color images!!\n%time white_clip.write_videofile(white_output, audio=False)", "_____no_output_____" ] ], [ [ "Play the video inline, or if you prefer find the video in your filesystem (should be in the same directory) and play it in your video player of choice.", "_____no_output_____" ] ], [ [ "HTML(\"\"\"\n<video width=\"960\" height=\"540\" controls>\n <source src=\"{0}\">\n</video>\n\"\"\".format(white_output))", "_____no_output_____" ] ], [ [ "## Improve the draw_lines() function\n\n**At this point, if you were successful with making the pipeline and tuning parameters, you probably have the Hough line segments drawn onto the road, but what about identifying the full extent of the lane and marking it clearly as in the example video (P1_example.mp4)? Think about defining a line to run the full length of the visible lane based on the line segments you identified with the Hough Transform. As mentioned previously, try to average and/or extrapolate the line segments you've detected to map out the full extent of the lane lines. You can see an example of the result you're going for in the video \"P1_example.mp4\".**\n\n**Go back and modify your draw_lines function accordingly and try re-running your pipeline. The new output should draw a single, solid line over the left lane line and a single, solid line over the right lane line. The lines should start from the bottom of the image and extend out to the top of the region of interest.**", "_____no_output_____" ], [ "Now for the one with the solid yellow lane on the left. 
This one's more tricky!", "_____no_output_____" ] ], [ [ "yellow_output = 'test_videos_output/solidYellowLeft.mp4'\n## To speed up the testing process you may want to try your pipeline on a shorter subclip of the video\n## To do so add .subclip(start_second,end_second) to the end of the line below\n## Where start_second and end_second are integer values representing the start and end of the subclip\n## You may also uncomment the following line for a subclip of the first 5 seconds\n##clip2 = VideoFileClip('test_videos/solidYellowLeft.mp4').subclip(0,5)\nclip2 = VideoFileClip('test_videos/solidYellowLeft.mp4')\nyellow_clip = clip2.fl_image(process_image)\n%time yellow_clip.write_videofile(yellow_output, audio=False)", "_____no_output_____" ], [ "HTML(\"\"\"\n<video width=\"960\" height=\"540\" controls>\n <source src=\"{0}\">\n</video>\n\"\"\".format(yellow_output))", "_____no_output_____" ] ], [ [ "## Writeup and Submission\n\nIf you're satisfied with your video outputs, it's time to make the report writeup in a pdf or markdown file. Once you have this Ipython notebook ready along with the writeup, it's time to submit for review! Here is a [link](https://github.com/udacity/CarND-LaneLines-P1/blob/master/writeup_template.md) to the writeup template file.\n", "_____no_output_____" ], [ "## Optional Challenge\n\nTry your lane finding pipeline on the video below. Does it still work? Can you figure out a way to make it more robust? If you're up for the challenge, modify your pipeline so it works with this video and submit it along with the rest of your project!", "_____no_output_____" ] ], [ [ "challenge_output = 'test_videos_output/challenge.mp4'\n## To speed up the testing process you may want to try your pipeline on a shorter subclip of the video\n## To do so add .subclip(start_second,end_second) to the end of the line below\n## Where start_second and end_second are integer values representing the start and end of the subclip\n## You may also uncomment the following line for a subclip of the first 5 seconds\n##clip3 = VideoFileClip('test_videos/challenge.mp4').subclip(0,5)\nclip3 = VideoFileClip('test_videos/challenge.mp4')\nchallenge_clip = clip3.fl_image(process_image)\n%time challenge_clip.write_videofile(challenge_output, audio=False)", "_____no_output_____" ], [ "HTML(\"\"\"\n<video width=\"960\" height=\"540\" controls>\n <source src=\"{0}\">\n</video>\n\"\"\".format(challenge_output))", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ] ]
cbfee820aebf73bdcb1634e41b859d4fa66ef6a5
1,763
ipynb
Jupyter Notebook
Repo-Ayudante/clases/clase_28_05_20_tp3/.ipynb_checkpoints/clase-checkpoint.ipynb
lucasliano/TC2
7a888a1cd4fae6a1aa89ca8f4d07ebe10526aa10
[ "MIT" ]
3
2020-05-02T17:14:57.000Z
2021-05-03T22:28:09.000Z
Repo-Ayudante/clases/clase_28_05_20_tp3/.ipynb_checkpoints/clase-checkpoint.ipynb
lucasliano/TC2
7a888a1cd4fae6a1aa89ca8f4d07ebe10526aa10
[ "MIT" ]
18
2020-04-04T21:09:04.000Z
2021-08-14T19:06:01.000Z
Repo-Ayudante/clases/clase_28_05_20_tp3/.ipynb_checkpoints/clase-checkpoint.ipynb
lucasliano/TC2
7a888a1cd4fae6a1aa89ca8f4d07ebe10526aa10
[ "MIT" ]
1
2020-04-04T20:00:21.000Z
2020-04-04T20:00:21.000Z
27.546875
292
0.579694
[ [ [ "# Clase 28/05/2020\n\n## Ejercicios TP3\n\n### Ejercicio 2\n\nObtener la Z(s) que corresponde a la siguiente función de fase:\n\n$ \\phi_{(w)} = tg^{-1} \\frac{-w^5 + 5 w^3 - 2w}{2 w^4 - w^2 + 5}$\n\nAyudas:\n\n- Revisar [apunte campus](https://www.campusvirtual.frba.utn.edu.ar/especialidad/pluginfile.php/61513/mod_resource/content/2/FASC03conFiguras.pdf) y [notebook](https://nbviewer.jupyter.org/github/agalbachicar/tc2/blob/master/notebooks/parte_de_funcion.ipynb) sobre parte de función. \n- ¡Tengan cuidado al pasar de $w$ a $s$!\n\n### Ejercicio 5\n\nEl siguiente diagrama de Bode corresponde a la respuesta en módulo de la transferencia de una red de énfasis, utilizada en un transmisor de FM para Broadcasting. Diseñar el circuito, verificando el mismo mediante simulación.\n\n<img src='bode_enfasis.png'>\n\nAyudas:\n\n- ¿Qué es una octava?\n- ¿Qué estructura me sirve para implementar una bilineal? ¿Y una bicuadrática?\n", "_____no_output_____" ] ] ]
[ "markdown" ]
[ [ "markdown" ] ]
cbfef52d113281373857b28071560ab0a551e9fb
747,988
ipynb
Jupyter Notebook
Scripts_MT_Structure/.ipynb_checkpoints/Exact_gradients-checkpoint.ipynb
nienkebrinkman/SS_MTI
2632214f7df9caaa53d33432193ba0602470d21a
[ "BSD-3-Clause" ]
null
null
null
Scripts_MT_Structure/.ipynb_checkpoints/Exact_gradients-checkpoint.ipynb
nienkebrinkman/SS_MTI
2632214f7df9caaa53d33432193ba0602470d21a
[ "BSD-3-Clause" ]
null
null
null
Scripts_MT_Structure/.ipynb_checkpoints/Exact_gradients-checkpoint.ipynb
nienkebrinkman/SS_MTI
2632214f7df9caaa53d33432193ba0602470d21a
[ "BSD-3-Clause" ]
null
null
null
263.840564
298,840
0.897299
[ [ [ "from os.path import exists, join, isfile\nfrom os import listdir, makedirs\nfrom obspy.geodetics import kilometer2degrees\nimport numpy as np\nfrom obspy.taup import TauPyModel\nimport matplotlib.pyplot as plt\nfrom SS_MTI import Inversion\n\nimport threading\nimport subprocess\n\nimport Create_Vmod\nfrom SS_MTI import Gradient, PhaseTracer, Misfit", "_____no_output_____" ] ], [ [ "# Fixed parameters (initial model)", "_____no_output_____" ] ], [ [ "bin_filepath = \"/home/nienke/Documents/Research/SS_MTI/External_packages/reflectivity_Mars/SRC/test/crfl_sac\"\nsave_path_OG = \"/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic\"\nif not exists(join(save_path_OG, \"start_v\")):\n makedirs(join(save_path_OG, \"start_v\"))\nf_start = join(save_path_OG, \"start_v\")\n\n## Fixed parameters:\nsrc_depth = 20.0\nepi_in_km = 1774.7380\nepi = kilometer2degrees(epi_in_km, radius=3389.5)\nbaz = 0.0\n\ndt = 0.025\n\nphases = [\"P\", \"S\", \"P\", \"S\", \"S\"]\ncomps = [\"Z\", \"T\", \"R\", \"Z\", \"R\"]\nt_pres = [1, 1, 1, 1, 1]\nt_posts = [30, 30, 30, 30, 30]\nylims = [2e-10, 2e-10, 1e-10, 3e-10, 2e-10]\n\nfmin = 0.2\nfmax = 0.6\nzerophase = False\n\n\n## Start parameters:\nbm_start_model = \"/home/nienke/Documents/Research/Data/MTI/MT_vs_STR/bm_models/TAYAK.bm\"\nm_rr = 0.3000\nm_tt = 0.1000 \nm_pp = 0.2000\nm_rt = 0.2000\nm_rp = 0.5000\nm_tp = 0.2000\n# mtt,mtp,mrt,mpp,mrp,mrr = 0.1000 0.2000 0.2000 0.1000 0.1000 0.2000 # TRUE MODEL\n\n\nfocal_mech = [m_rr, m_tt, m_pp, m_rt, m_rp, m_tp]\nMoho_d = 75.0\nm0 = np.hstack((focal_mech, Moho_d))\nsigmas = np.ones(len(phases)) * 1e-10\n\nCreate_Vmod.create_dat_file(\n src_depth, epi_in_km, baz, focal_mech, dt, f_start, bm_start_model,\n)\nCreate_Vmod.update_dat_file(\n dat_folder=f_start,\n m=m0,\n vpvs=False,\n depth=True,\n produce_tvel= True,\n tvel_name = \"Init\",\n)", "depth of MOHO (from TAYAK) will be changed\n" ], [ "m0", "_____no_output_____" ] ], [ [ "# Observed data", "_____no_output_____" ] ], [ [ "path_observed = (\n \"/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/obs_2/\"\n)\nnpz_file = \"/home/nienke/Documents/Research/Data/npz_files/TAYAK.npz\"\nst_obs = Gradient.read_refl_mseeds(path=path_observed)\nTaup = TauPyModel(npz_file)\nobs_tts = [PhaseTracer.get_traveltime(Taup, phase, src_depth, epi) for phase in phases]\nst_obs_w, st_obs_full, s_obs = Gradient.window(\n st_obs, phases, comps, obs_tts, t_pres, t_posts, fmin, fmax, zerophase,\n)", "_____no_output_____" ] ], [ [ "# Classic gradient descent", "_____no_output_____" ], [ "# ", "_____no_output_____" ] ], [ [ "update_nr = 25\ncurrent_update = 15\n\nepsilon = 0.001\nprior_crfl_filepath = join(f_start, \"crfl.dat\")\nInversion.gradient_descent(\n bin_path=bin_filepath,\n save_path=save_path_OG,\n epsilon=epsilon,\n update_nr=update_nr,\n dt=dt,\n sigmas=sigmas,\n st_obs_w=st_obs_w,\n current_update=current_update,\n prior_crfl_filepath=None,\n alphas=[1e-6, 1e-5, 5e-5, 1e-4, 5e-4, 1e-3, 1e-2, 1e-1],\n fmin=fmin,\n fmax=fmax,\n phases=phases,\n comps=comps,\n t_pres=t_pres,\n t_posts=t_posts,\n)", "Forward run + misfit calc for m: [ 0.17155801 0.07807256 0.2 0.16605271 0.07805169\n -0.13982469 75.33353196]\n75.3335319577\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_0/75.33353195774892.tvel' ...\n63.1728349972\nForward run + misfit calc 
for m: [ 0.17165033 0.07807256 0.2 0.16605271 0.07805169\n -0.13982469 75.33353196]\n75.3335319577\nforward run in iteration: 1\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_1/75.33353195774892.tvel' ...\n63.1738912229\nForward run + misfit calc for m: [ 0.17155801 0.07816488 0.2 0.16605271 0.07805169\n -0.13982469 75.33353196]\n75.3335319577\nforward run in iteration: 2\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_2/75.33353195774892.tvel' ...\n63.172388371\nForward run + misfit calc for m: [ 0.17155801 0.07807256 0.20009232 0.16605271 0.07805169\n -0.13982469 75.33353196]\n75.3335319577\nforward run in iteration: 3\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_3/75.33353195774892.tvel' ...\n63.1728349972\nForward run + misfit calc for m: [ 0.17155801 0.07807256 0.2 0.16614503 0.07805169\n -0.13982469 75.33353196]\n75.3335319577\nforward run in iteration: 4\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_4/75.33353195774892.tvel' ...\n63.1728349972\nForward run + misfit calc for m: [ 0.17155801 0.07807256 0.2 0.16605271 0.07814401\n -0.13982469 75.33353196]\n75.3335319577\nforward run in iteration: 5\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_5/75.33353195774892.tvel' ...\n63.1728349972\nForward run + misfit calc for m: [ 0.17155801 0.07807256 0.2 0.16605271 0.07805169\n -0.13973238 75.33353196]\n75.3335319577\nforward run in iteration: 6\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_6/75.33353195774892.tvel' ...\n63.1750229061\nForward run + misfit calc for m: [ 0.17155801 0.07807256 0.2 0.16605271 0.07805169\n -0.13982469 75.34106531]\n75.3410653109\nforward run in iteration: 7\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_7/75.34106531094469.tvel' ...\n63.1268626688\nForward run + misfit calc for m: [ 0.17154657 0.0780774 0.2 0.16605271 0.07805169\n -0.13984839 75.33353806]\n75.3335380603\nforward run in iteration: 8\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_8/75.3335380602543.tvel' ...\n63.1717555509\nForward run + misfit calc for m: [ 0.1714436 0.07812094 0.2 0.16605271 0.07805169\n -0.14006169 75.33359298]\n75.3335929828\nforward run in iteration: 9\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_9/75.33359298280278.tvel' ...\n63.1641582709\nForward run + misfit calc for m: [ 0.17098596 0.07831445 0.2 0.16605271 
0.07805169\n -0.14100967 75.33383708]\n75.333837083\nforward run in iteration: 10\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_10/75.33383708301828.tvel' ...\n63.1382743799\nForward run + misfit calc for m: [ 0.1704139 0.07855635 0.2 0.16605271 0.07805169\n -0.14219465 75.33414221]\n75.3341422083\nforward run in iteration: 11\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_11/75.33414220828763.tvel' ...\n63.1032717835\nForward run + misfit calc for m: [ 0.16583745 0.08049151 0.2 0.16605271 0.07805169\n -0.1516745 75.33658321]\n75.3365832104\nforward run in iteration: 12\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_12/75.33658321044251.tvel' ...\n62.8985402123\nForward run + misfit calc for m: [ 0.16011689 0.08291045 0.2 0.16605271 0.07805169\n -0.1635243 75.33963446]\n75.3396344631\nforward run in iteration: 13\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_13/75.33963446313611.tvel' ...\n62.8007841244\nForward run + misfit calc for m: [ 5.71468151e-02 1.26451464e-01 2.00000000e-01 1.66052714e-01\n 7.80516866e-02 -3.76820715e-01 7.53945570e+01]\n75.3945570116\nforward run in iteration: 14\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_14/75.39455701162076.tvel' ...\n90.0329499927\nForward run + misfit calc for m: [ -0.97255395 0.5618616 0.2 0.16605271 0.07805169\n -2.5097849 75.9437825 ]\n75.9437824965\nforward run in iteration: 15\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_15/It_15/75.94378249646734.tvel' ...\n2745.35688727\nforward run in iteration: 16\ndepth of MOHO (from TAYAK) will be changed\nthis is the iteration used for next update: 16\nForward run + misfit calc for m: [ 0.16011689 0.08291045 0.2 0.16605271 0.07805169\n -0.1635243 75.33963446]\n75.3396344631\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_16/It_0/75.33963446313611.tvel' ...\n62.8007841244\nForward run + misfit calc for m: [ 0.16020416 0.08291045 0.2 0.16605271 0.07805169\n -0.1635243 75.33963446]\n75.3396344631\nforward run in iteration: 1\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_16/It_1/75.33963446313611.tvel' ...\n62.7987023488\nForward run + misfit calc for m: [ 0.16011689 0.08299772 0.2 0.16605271 0.07805169\n -0.1635243 75.33963446]\n75.3396344631\nforward run in iteration: 2\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for 
'/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_16/It_2/75.33963446313611.tvel' ...\n62.8025841267\nForward run + misfit calc for m: [ 0.16011689 0.08291045 0.20008727 0.16605271 0.07805169\n -0.1635243 75.33963446]\n75.3396344631\nforward run in iteration: 3\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_16/It_3/75.33963446313611.tvel' ...\n62.8007841244\nForward run + misfit calc for m: [ 0.16011689 0.08291045 0.2 0.16613998 0.07805169\n -0.1635243 75.33963446]\n75.3396344631\nforward run in iteration: 4\ndepth of MOHO (from TAYAK) will be changed\n" ], [ "Gradient.plot_updates(\n save_path=save_path_OG,\n st_obs_full=st_obs_full,\n st_obs_w=st_obs_w,\n obs_tts=obs_tts,\n phases=phases,\n comps=comps,\n t_pres=t_pres,\n t_posts=t_posts,\n fmin=fmin,\n fmax=fmax,\n zerophase=zerophase,\n ylims=ylims,\n)", "_____no_output_____" ], [ "Gradient.plot_misfits(save_path_OG,epsilon=0.001)", "[ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24]\n" ] ], [ [ "# Gaus-Newton method", "_____no_output_____" ], [ "## Jacobian calculation", "_____no_output_____" ] ], [ [ "def Exact_gradient(\n thread_folder: str,\n thread_nr: int,\n dat_folder: str,\n unit_v: np.array,\n bin_filepath: str,\n):\n print(thread_nr)\n print(unit_v)\n \"\"\" Thread worker function \"\"\"\n\n \"\"\" Copy .dat file into new folder \"\"\"\n dat_filepath = join(dat_folder, \"crfl.dat\")\n subprocess.call(f\"scp {dat_filepath} .\", shell=True, cwd=thread_folder)\n \"\"\" Copy binary file into new folder \"\"\"\n subprocess.call(f\"scp {bin_filepath} .\", shell=True, cwd=thread_folder)\n\n \"\"\" Plug unit vector into .dat file \"\"\"\n Create_Vmod.update_dat_file(\n dat_folder=thread_folder,\n m=unit_v,\n vpvs=False,\n depth=False,\n produce_tvel=True,\n tvel_name=\"exact\",\n )\n\n \"\"\" Run the reflectivity code \"\"\"\n print(f\"Running exact gradient on thread: {thread_nr}\\n\")\n subprocess.call(\"./crfl_sac\", shell=True, cwd=thread_folder)\n print(f\"thread: {thread_nr} is done\\n\")\n np.save(join(thread_folder, \"m.npy\"), unit_v)\n\n\ndef Get_J_moment(\n save_path: str,\n prior_dat_folder: str,\n bin_file_path: str,\n phases: [str],\n comps: [str],\n t_pres: [float],\n t_posts: [float],\n fmin: float,\n fmax: float,\n zerophase: bool,\n):\n\n unit_vs = np.array(\n (\n [1, 0, 0, 0, 0, 0],\n [0, 1, 0, 0, 0, 0],\n [0, 0, 1, 0, 0, 0],\n [0, 0, 0, 1, 0, 0],\n [0, 0, 0, 0, 1, 0],\n [0, 0, 0, 0, 0, 1],\n )\n )\n J_approx = []\n\n threads = []\n dat_folder = prior_dat_folder # f_start\n \"\"\" Creating the exact seismograms: \"\"\"\n for i in range(6):\n \"\"\" Create folder for each thread \"\"\"\n thread_folder = join(save_path, f\"Exact_{i}\")\n if not exists(thread_folder):\n makedirs(thread_folder)\n \"\"\" Run forward model with each unit vector \"\"\"\n exact_st = [\n f\n for f in listdir(thread_folder)\n if f.startswith(\"st00\")\n if isfile(join(thread_folder, f))\n ]\n if not exact_st:\n t = threading.Thread(\n target=Exact_gradient,\n args=[thread_folder, i, dat_folder, unit_vs[:, i], bin_filepath],\n )\n threads.append(t)\n t.start()\n if not exact_st:\n for thread in threads:\n thread.join()\n\n \"\"\" Reading in seismograms and substituting into approximate Green's functions: \"\"\"\n for i in range(6):\n thread_folder = join(save_path, f\"Exact_{i}\")\n st = Gradient.read_refl_mseeds(path=thread_folder, 
stack=False)\n\n \"\"\" Window the data \"\"\"\n npz_name = [\n f\n for f in listdir(thread_folder)\n if f.endswith(\".npz\")\n if isfile(join(thread_folder, f))\n ]\n if npz_name:\n npz_file = join(thread_folder, npz_name[0],)\n dat_file = join(thread_folder)\n\n Taup = TauPyModel(npz_file)\n depth = Create_Vmod.read_depth_from_dat(dat_file)\n epi = Create_Vmod.read_epi_from_dat(dat_file)\n syn_tts = []\n for j, phase in enumerate(phases):\n syn_tts.append(PhaseTracer.get_traveltime(Taup, phase, depth, epi))\n else:\n syn_tts = Gradient.get_tt_from_dat_file(phases, thread_folder, \"exact\")\n st_syn_w, st_syn_full, s_syn = Gradient.window(\n st, phases, comps, syn_tts, t_pres, t_posts, fmin, fmax, zerophase,\n )\n\n if i == 0:\n J_approx = np.zeros((len(s_syn), 6))\n J_approx[:, i] = s_syn\n else:\n J_approx[:, i] = s_syn\n return J_approx\n\n\ndef proposal(\n m:[float],\n save_path_OG: str,\n new_folder_name: str,\n dat_folder: str,\n phases: [str],\n comps: [str],\n t_pres: [float],\n t_posts: [float],\n dt: float,\n sigmas: [float],\n fmin: float,\n fmax:float,\n zerophase:bool,\n):\n save_folder = join(save_path_OG, new_folder_name)\n if not exists(save_folder):\n makedirs(save_folder)\n\n if exists(join(save_folder, \"It_0\")):\n st_file = [\n f\n for f in listdir(join(save_folder, \"It_0\"))\n if f.startswith(\"st00\")\n if isfile(join(save_folder, \"It_0\", f))\n ]\n else:\n st_file = []\n if st_file:\n st_syn = Gradient.read_refl_mseeds(path=join(save_folder, \"It_0\"), stack=False)\n else:\n src_str = Gradient.SRC_STR(\n binary_file_path=bin_filepath,\n prior_dat_filepath=join(dat_folder, \"crfl.dat\"),\n save_folder=save_folder,\n phases=phases,\n components=comps,\n t_pres=t_pres,\n t_posts=t_posts,\n vpvs=False,\n depth=True,\n dt=dt,\n sigmas=sigmas,\n tstars=None,\n fmin=fmin,\n fmax=fmax,\n zerophase=zerophase,\n start_it=0,\n )\n\n st_syn = src_str.forward(m)\n \n \n \"\"\" Window the data \"\"\"\n if exists(join(save_folder, \"It_0\")):\n npz_name = [\n f\n for f in listdir(join(save_folder, \"It_0\"))\n if f.endswith(\".npz\")\n if isfile(join(save_folder, \"It_0\", f))\n ]\n else:\n npz_name = []\n\n if npz_name:\n npz_file = join(save_folder, \"It_0\", npz_name[0],)\n dat_file = join(save_folder, \"It_0\")\n\n Taup = TauPyModel(npz_file)\n depth = Create_Vmod.read_depth_from_dat(dat_file)\n epi = Create_Vmod.read_epi_from_dat(dat_file)\n syn_tts = []\n for i, phase in enumerate(phases):\n syn_tts.append(PhaseTracer.get_traveltime(Taup, phase, depth, epi))\n else:\n syn_tts = Gradient.get_tt_from_dat_file(phases, join(save_folder, \"It_0\"), m[-1])\n st_syn_w, st_syn_full, s_syn = Gradient.window(\n st_syn, phases, comps, syn_tts, t_pres, t_posts, fmin, fmax, zerophase,\n )\n \n \n return st_syn_w, st_syn_full, s_syn", "_____no_output_____" ] ], [ [ "## Start the inversion:", "_____no_output_____" ] ], [ [ "save_path_OG = \"/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/\"", "_____no_output_____" ], [ "update_nr = 4\ncurrent_update = 2\nupdate = 0\n\n \nlambd_0 = 100. 
# Initial lambda value\nnu = 1.5\n\nmisfits = np.zeros(update_nr + 1)\nwhile update < update_nr:\n if update == 0:\n accepted_folder = f_start\n \n# if current_update != 0:\n# \"\"\" Check where the previous update ended and take this crfl.dat file as prior file\"\"\"\n# prev_update = current_update - 1\n\n \"\"\" \n Step 1: Get the Gradient of seismogram w.r.t moment tensor paramters based on unit vectors: \n \"\"\"\n J_m = Get_J_moment(\n save_path=join(save_path_OG,f\"Update_{update}\"),\n prior_dat_folder=accepted_folder,\n bin_file_path=bin_filepath,\n phases=phases,\n comps=comps,\n t_pres=t_pres,\n t_posts=t_posts,\n fmin=fmin,\n fmax=fmax,\n zerophase=zerophase,\n )\n \"\"\" \n Step 2: Forward run with the actual model parameters\n - This is necessary for the Jacobian of the structural parameters\n - This is necessary for the gradient of the misfit w.r.t. the seismometer\n - This is necessary for the misfit calculation\n \"\"\"\n\n\n st_syn_w0, st_syn_full0, s_syn0 = proposal(\n m=m0,\n save_path_OG=save_path_OG,\n new_folder_name=f\"Update_{update}\",\n dat_folder=accepted_folder,\n phases=phases,\n comps=comps,\n t_pres=t_pres,\n t_posts=t_posts,\n dt=dt,\n sigmas=sigmas,\n fmin=fmin,\n fmax=fmax,\n zerophase = False,\n )\n\n \"\"\" Calculate misfit \"\"\"\n xi0 = np.sum(Misfit.L2().run_misfit(phases, st_obs_w, st_syn_w0, sigmas ** 2))\n\n \n \"\"\" Save initial guess parameters: \"\"\"\n if update == 0:\n if not exists(join(save_path_OG, \"start_v\")):\n makedirs(join(save_path_OG, \"start_v\"))\n np.save(join(save_path_OG, \"start_v\",\"m1_initial.npy\"),m0)\n st = Gradient.read_refl_mseeds(path = join(save_path_OG,f\"Update_{update}\",\"It_0\"),stack = False)\n st.write(join(save_path_OG, \"start_v\", \"st_m1.mseed\"), format=\"MSEED\")\n np.save(join(save_path_OG, \"start_v\", \"misfit.npy\"),xi0)\n \n\n \"\"\" \n Step 3: Get the Jacobian of the structural parameters\n \"\"\"\n\n \"\"\" Then we need S1 and epsilon \"\"\"\n epsilon = 0.001\n m1 = m0\n m1[-1] += epsilon * m1[-1]\n\n st_syn_w1, st_syn_full1, s_syn1 = proposal(\n m=m1,\n save_path_OG=join(save_path_OG,f\"Update_{update}\"),\n new_folder_name=\"Depth_update\",\n dat_folder=accepted_folder,\n phases=phases,\n comps=comps,\n t_pres=t_pres,\n t_posts=t_posts,\n dt=dt,\n sigmas=sigmas,\n fmin=fmin,\n fmax=fmax,\n zerophase = False,\n )\n\n J_str = np.expand_dims((s_syn1 - s_syn0) / epsilon, axis=1)\n \"\"\" \n Step 4: Combine the two Jacobians (moment tensor + structure)\n \"\"\"\n J_total = np.hstack((J_m, J_str)) / np.mean(sigmas)**2\n\n \"\"\" \n Step 5: Get the gradient of the misfit w.r.t. 
the seismogram \n (i.e., derivative of the L2-norm) \n \"\"\"\n dxi_ds = np.expand_dims(-(s_obs - s_syn0) / (np.mean(sigmas) ** 2), axis=1)\n\n\n \"\"\" \n Step 6: Do update\n NOTE: we have a singular matrix unfortunately, so therefore we will make use of the Levenberg-Marquardt algorithm,\n which is basically adding damping.\n Instead of the identity matrix we use diag(J.T@J): \n Fletcher(1971)-A modified Marquardt subroutine for non-linear least squares, \n zero values will be filled with average logaritmic values.\n \"\"\"\n\n# I = np.eye(J_total.shape[1]) # Levenberg-Marquardt algorithm\n \n diag = np.diag(J_total.T @ J_total)\n diag.setflags(write=1)\n # Fill zero-values with logaritmic average values:\n zero_diag = np.where(diag == 0)\n non_zero_inds = np.where(diag != 0)\n if zero_diag:\n for z in zero_diag[0]:\n print(np.mean(diag[diag != 0]))\n diag[z] =np.exp(np.mean(np.log(diag[diag != 0])))\n diag[non_zero_inds]=0.\n I = np.diag(diag) \n \n \n# diag = np.diag(J_total.T @ J_total)\n# diag.setflags(write=1)\n# # Fill zero-values with logaritmic average values:\n# zero_diag = np.where(diag == 0)\n# if zero_diag:\n# for z in zero_diag[0]:\n# print(z)\n# diag[z] = np.exp(np.mean(np.log(diag[diag != 0])))\n# I = np.diag(diag) # Levenberg-Marquardt Fletcher\n \n \"\"\" \n Step 7: Test two scenarios:\n 1. Lambda\n 2. Lambda/nu\n \"\"\"\n update_current = update\n while update_current == update:\n print(f\"lamda 0 = {lambd_0}\")\n \n\n xi1s=np.ones(2)*9e9\n m1_props = np.zeros((len(m0),2))\n for i in range(2): \n \"\"\" 1. lambda_0\"\"\"\n if i == 0:\n lambd = lambd_0\n prop_folder = join(save_path_OG,f\"Update_{update}\",\"lambd_0\")\n\n elif i == 1:\n lambd = lambd_0/nu\n prop_folder = join(save_path_OG,f\"Update_{update}\",\"lambd_0_v\")\n\n J_inv = np.linalg.inv(J_total.T @ J_total + lambd * I)\n J_d = J_total.T @ dxi_ds\n\n m1_props[:,i] = m0 + (J_inv @ J_d)[:,0]\n xi1s[i] = Gradient.SRC_STR(\n binary_file_path=bin_filepath,\n prior_dat_filepath=join(accepted_folder,\"crfl.dat\"),\n save_folder=prop_folder,\n phases=phases,\n components=comps,\n t_pres=t_pres,\n t_posts=t_posts,\n vpvs=False,\n depth=True,\n dt=dt,\n sigmas=sigmas,\n tstars=None,\n fmin=fmin,\n fmax=fmax,\n zerophase=zerophase,\n start_it=0,\n ).misfit(m1_props[:,i],st_obs_w)\n\n ## Make a choice for updating \n if xi1s[0] < xi0 and xi1s[1] < xi0: \n \"\"\"\n Both proposals are smaller in misfit,\n we then choose lambda/nu because we aim for least damping\n \"\"\"\n print(\"update 1\")\n lambd_0 /= nu \n # Save Everything in current update:\n\n np.save(join(save_path_OG,f\"Update_{update}\",f\"m1_.npy\"),m1_props[:,1])\n st = Gradient.read_refl_mseeds(path = join(save_path_OG,f\"Update_{update}\",\"lambd_0_v\",\"It_0\"),stack = False)\n st.write(join(save_path_OG,f\"Update_{update}\", \"st_m1.mseed\"), format=\"MSEED\")\n np.save(join(save_path_OG,f\"Update_{update}\", \"misfit.npy\"),xi1s[1])\n np.save(join(save_path_OG,f\"Update_{update}\", \"lambda_0_v.npy\"),lambd_0 / nu)\n\n\n #DO UPTDATE\n accepted_folder = join(save_path_OG,f\"Update_{update}\",\"lambd_0_v\",\"It_0\")\n update +=1\n elif xi1s[0] < xi0 and xi1s[1] > xi0: \n print(\"update 2\")\n lambd_0 = lambd_0 \n #DO UPTDATE\n\n np.save(join(save_path_OG,f\"Update_{update}\",f\"m1_.npy\"),m1_props[:,0])\n st = Gradient.read_refl_mseeds(path = join(save_path_OG,f\"Update_{update}\",\"lambd_0\",\"It_0\"),stack = False)\n st.write(join(save_path_OG,f\"Update_{update}\", \"st_m1.mseed\"), format=\"MSEED\")\n np.save(join(save_path_OG,f\"Update_{update}\", 
\"misfit.npy\"),xi1s[0])\n np.save(join(save_path_OG,f\"Update_{update}\", \"lambda_0.npy\"),lambd_0)\n\n accepted_folder = join(save_path_OG,f\"Update_{update}\",\"lambd_0\",\"It_0\")\n\n update+= 1\n else: # No update, because both new misfits are smaller\n print(\"update 3 (no update)\")\n lambd_0 *= nu\n\n", "0\n[1 0 0 0 0 0]\n1\n[0 1 0 0 0 0]\n2\n[0 0 1 0 0 0]\n3\n[0 0 0 1 0 0]\nRunning exact gradient on thread: 0\n\nRunning exact gradient on thread: 1\n\n4\nRunning exact gradient on thread: 2\n\n[0 0 0 0 1 0]5\n[0 0 0 0 0 1]\n\nRunning exact gradient on thread: 3\n\nRunning exact gradient on thread: 5\n\nRunning exact gradient on thread: 4\n\nthread: 0 is done\n\nthread: 3 is done\n\nthread: 4 is done\n\nthread: 2 is done\n\nthread: 5 is done\n\nthread: 1 is done\n\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/Exact_0/exact.tvel' ...\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/Exact_1/exact.tvel' ...\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/Exact_2/exact.tvel' ...\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/Exact_3/exact.tvel' ...\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/Exact_4/exact.tvel' ...\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/Exact_5/exact.tvel' ...\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/It_0/76.28525117896503.tvel' ...\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/Depth_update/It_0/76.36153643014399.tvel' ...\n1.74267920469e+26\nlamda 0 = 100.0\nForward run + misfit calc for m: [ 0.43429661 0.11994378 0.2 0.19526386 1.10837779\n 0.62181319 76.36114195]\n76.3611419544\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/lambd_0/It_0/76.36114195435894.tvel' ...\n384.799431146\nForward run + misfit calc for m: [ 0.43429661 0.11994378 0.2 0.19526386 1.10837779\n 0.62181319 76.36114195]\n76.3611419544\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/lambd_0_v/It_0/76.36114195435894.tvel' ...\n384.799431146\nupdate 3 (no update)\nlamda 0 = 150.0\nForward run + misfit calc for m: [ 0.43429661 0.11994378 0.2 0.19526386 1.10837779\n 0.62181319 76.36114195]\n76.3611419544\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/lambd_0/It_0/76.36114195435894.tvel' ...\n384.799431146\nForward run + misfit calc for m: [ 0.43429661 0.11994378 0.2 0.19526386 1.10837779\n 0.62181319 76.36114195]\n76.3611419544\nforward run in 
iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/lambd_0_v/It_0/76.36114195435894.tvel' ...\n384.799431146\nupdate 3 (no update)\nlamda 0 = 225.0\nForward run + misfit calc for m: [ 0.43429661 0.11994378 0.2 0.19526386 1.10837779\n 0.62181319 76.36114195]\n76.3611419544\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/lambd_0/It_0/76.36114195435894.tvel' ...\n384.799431146\nForward run + misfit calc for m: [ 0.43429661 0.11994378 0.2 0.19526386 1.10837779\n 0.62181319 76.36114195]\n76.3611419544\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/lambd_0_v/It_0/76.36114195435894.tvel' ...\n384.799431146\nupdate 3 (no update)\nlamda 0 = 337.5\nForward run + misfit calc for m: [ 0.43429661 0.11994378 0.2 0.19526386 1.10837779\n 0.62181319 76.36114195]\n76.3611419544\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/lambd_0/It_0/76.36114195435894.tvel' ...\n384.799431146\nForward run + misfit calc for m: [ 0.43429661 0.11994378 0.2 0.19526386 1.10837779\n 0.62181319 76.36114195]\n76.3611419544\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/lambd_0_v/It_0/76.36114195435894.tvel' ...\n384.799431146\nupdate 3 (no update)\nlamda 0 = 506.25\nForward run + misfit calc for m: [ 0.43429661 0.11994378 0.2 0.19526386 1.10837779\n 0.62181319 76.36114195]\n76.3611419544\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/lambd_0/It_0/76.36114195435894.tvel' ...\n384.799431146\nForward run + misfit calc for m: [ 0.43429661 0.11994378 0.2 0.19526386 1.10837779\n 0.62181319 76.36114195]\n76.3611419544\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/lambd_0_v/It_0/76.36114195435894.tvel' ...\n384.799431146\nupdate 3 (no update)\nlamda 0 = 759.375\nForward run + misfit calc for m: [ 0.43429661 0.11994378 0.2 0.19526386 1.10837779\n 0.62181319 76.36114195]\n76.3611419544\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/lambd_0/It_0/76.36114195435894.tvel' ...\n384.799431146\nForward run + misfit calc for m: [ 0.43429661 0.11994378 0.2 0.19526386 1.10837779\n 0.62181319 76.36114195]\n76.3611419544\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\nBuilding obspy.taup model for '/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gauss_newton/Update_0/lambd_0_v/It_0/76.36114195435894.tvel' ...\n384.799431146\nupdate 3 (no update)\nlamda 0 = 1139.0625\nForward run + misfit calc for m: [ 
0.43429661 0.11994378 0.2 0.19526386 1.10837779\n 0.62181319 76.36114195]\n76.3611419544\nforward run in iteration: 0\ndepth of MOHO (from TAYAK) will be changed\n" ], [ "diag = np.diag(J_total.T @ J_total)\ndiag.setflags(write=1)\n# Fill zero-values with logaritmic average values:\nzero_diag = np.where(diag == 0)\nnon_zero_inds = np.where(diag != 0)\nif zero_diag:\n for z in zero_diag[0]:\n print(np.mean(diag[diag != 0]))\n diag[z] =100#np.exp(np.mean(np.log(diag[diag != 0])))\ndiag[non_zero_inds]=0.\nI = np.diag(diag) \nprint(I)", "1.74267920469e+26\n[[ 0. 0. 0. 0. 0. 0. 0.]\n [ 0. 0. 0. 0. 0. 0. 0.]\n [ 0. 0. 100. 0. 0. 0. 0.]\n [ 0. 0. 0. 0. 0. 0. 0.]\n [ 0. 0. 0. 0. 0. 0. 0.]\n [ 0. 0. 0. 0. 0. 0. 0.]\n [ 0. 0. 0. 0. 0. 0. 0.]]\n" ], [ "J_inv = np.linalg.inv(J_total.T @ J_total + 1e2 * I)\nJ_d = J_total.T @ dxi_ds\n\nprop = m0 + (J_inv @ J_d)[:,0]", "_____no_output_____" ], [ "J_inv @ J_total.T", "_____no_output_____" ], [ "J_total[:,2]", "_____no_output_____" ], [ "Gradient.plot_updates(\n save_path=save_path_OG,\n st_obs_full=st_obs_full,\n st_obs_w=st_obs_w,\n obs_tts=obs_tts,\n phases=phases,\n comps=comps,\n t_pres=t_pres,\n t_posts=t_posts,\n fmin=fmin,\n fmax=fmax,\n zerophase=zerophase,\n ylims=ylims,\n)", "_____no_output_____" ], [ "fig, ax = plt.subplots(nrows=1, ncols=1, sharex=\"all\", figsize=(8, 8))\n\nupdate_folders = np.sort(\n np.asarray(\n [int(f.strip(\"Update_\")) for f in listdir(save_path_OG) if f.startswith(\"misfit\")]\n )\n)\nprint(update_folders)\n\nXs = np.array(\n [\n np.load(join(save_path, f\"Update_{update_folders[up_nr]}\", f\"misfit.npy\"))\n for up_nr in update_folders\n ]\n)\nms = np.arange(0, len(Xs))\nax.semilogy(ms, Xs)\nax.tick_params(axis=\"both\", which=\"major\", labelsize=15)\nax.tick_params(axis=\"both\", which=\"minor\", labelsize=15)\nax.set_xlabel(\"Update nr\", fontsize=20)\nax.set_ylabel(\"Misfit\", fontsize=20)\nax.set_xticks(ms)\nax.set_xticklabels(ms)", "_____no_output_____" ], [ "(xi1s < xi0).any()", "_____no_output_____" ], [ "dxi_ds", "_____no_output_____" ], [ "J_d", "_____no_output_____" ], [ "diag", "_____no_output_____" ], [ "dxi_ds.T.shape", "_____no_output_____" ], [ "J_total.T @ dxi_ds.T", "_____no_output_____" ] ], [ [ "# Two-step inversion:", "_____no_output_____" ], [ "### Get misfit w.r.t. 
seismogram\nSeismogram is computed using m0 (so initial guess)", "_____no_output_____" ] ], [ [ "\"\"\" Forward run with the actual model parameters \"\"\"\nsave_folder = join(save_path_OG, \"Update_1\")\nif not exists(save_folder):\n makedirs(save_folder)\n\nst_file = [\n f\n for f in listdir(join(save_folder, \"It_0\"))\n if f.startswith(\"st00\")\n if isfile(join(save_folder, \"It_0\", f))\n]\nif st_file:\n st_syn = Gradient.read_refl_mseeds(path=st_file, stack=False)\nelse:\n src_str = Gradient.SRC_STR(\n binary_file_path=bin_filepath,\n prior_dat_filepath=join(f_start, \"crfl.dat\"),\n save_folder=save_folder,\n phases=phases,\n components=comps,\n t_pres=t_pres,\n t_posts=t_posts,\n vpvs=False,\n depth=True,\n dt=dt,\n sigmas=sigmas,\n tstars=None,\n fmin=fmin,\n fmax=fmax,\n zerophase=False,\n start_it=0,\n )\n\n st_syn = src_str.forward(m0)\n\n\"\"\" Window the data \"\"\"\nnpz_name = [\n f\n for f in listdir(join(save_folder, \"It_0\"))\n if f.endswith(\".npz\")\n if isfile(join(save_folder, \"It_0\", f))\n]\nif npz_name:\n npz_file = join(save_folder, \"It_0\", npz_name[0],)\n dat_file = join(save_folder, \"It_0\")\n\n Taup = TauPyModel(npz_file)\n depth = Create_Vmod.read_depth_from_dat(dat_file)\n epi = Create_Vmod.read_epi_from_dat(dat_file)\n syn_tts = []\n for i, phase in enumerate(phases):\n syn_tts.append(PhaseTracer.get_traveltime(Taup, phase, depth, epi))\nelse:\n syn_tts = Gradient.get_tt_from_dat_file(phases, join(save_folder, \"It_0\"), m0[-1])\nst_syn_w, st_syn_full, s_syn = Gradient.window(\n st_syn, phases, comps, syn_tts, t_pres, t_posts, fmin, fmax, zerophase,\n)", "_____no_output_____" ] ], [ [ "### Gradient of seismogram w.r.t. structural update", "_____no_output_____" ] ], [ [ "\"\"\" We have the seismogram of m0 (from previous cell)\"\"\"\ns0 = s_syn\n\n\"\"\" Then we need S1 and epsilon \"\"\"\nepsilon = 0.001\nm1 = m0\nm1[-1] += epsilon * m1[-1]\n\nsave_folder = join(save_path_OG, \"Depth_update\")\nif not exists(save_folder):\n makedirs(save_folder)\n\nst_file = [\n f\n for f in listdir(join(save_folder, \"It_0\"))\n if f.startswith(\"st00\")\n if isfile(join(save_folder, \"It_0\", f))\n]\nif st_file:\n st_syn = Gradient.read_refl_mseeds(path=st_file, stack=False)\nelse:\n src_str = Gradient.SRC_STR(\n binary_file_path=bin_filepath,\n prior_dat_filepath=join(f_start, \"crfl.dat\"),\n save_folder=save_folder,\n phases=phases,\n components=comps,\n t_pres=t_pres,\n t_posts=t_posts,\n vpvs=False,\n depth=True,\n dt=dt,\n sigmas=sigmas,\n tstars=None,\n fmin=fmin,\n fmax=fmax,\n zerophase=False,\n start_it=0,\n )\n\n st_syn = src_str.forward(m1)\n\n\"\"\" Window the data \"\"\"\nnpz_name = [\n f\n for f in listdir(join(save_folder, \"It_0\"))\n if f.endswith(\".npz\")\n if isfile(join(save_folder, \"It_0\", f))\n]\nif npz_name:\n npz_file = join(save_folder, \"It_0\", npz_name[0],)\n dat_file = join(save_folder, \"It_0\")\n\n Taup = TauPyModel(npz_file)\n depth = Create_Vmod.read_depth_from_dat(dat_file)\n epi = Create_Vmod.read_epi_from_dat(dat_file)\n syn_tts = []\n for i, phase in enumerate(phases):\n syn_tts.append(PhaseTracer.get_traveltime(Taup, phase, depth, epi))\nelse:\n syn_tts = Gradient.get_tt_from_dat_file(phases, join(save_folder, \"It_0\"), m0[-1])\nst_syn_w1, st_syn_full1, s1 = Gradient.window(\n st_syn, phases, comps, syn_tts, t_pres, t_posts, fmin, fmax, zerophase,\n)", "_____no_output_____" ], [ "\"\"\" Get the approximate gradient of the depth \"\"\"\nJ_depth = np.expand_dims((s1 - s0) / epsilon, axis=1)", "_____no_output_____" ], [ 
"J_depth.shape", "_____no_output_____" ] ], [ [ "### Add depth gradient to exact gradient", "_____no_output_____" ] ], [ [ "J_total = np.hstack((J_approx, J_depth))", "_____no_output_____" ] ], [ [ "### Calculate derivative of misfit w.r.t. seismogram (using L2)", "_____no_output_____" ] ], [ [ "# Considering L2:\ndxi_ds = np.expand_dims(-(s_obs - s_syn) / (np.mean(sigmas) ** 2), axis=0)", "_____no_output_____" ], [ "dxi_ds.shape\n\nG_total.shape", "_____no_output_____" ] ], [ [ "### Determine the gradient of the misfit w.r.t. model params", "_____no_output_____" ] ], [ [ "dxi_dm = dxi_ds @ G_total", "_____no_output_____" ], [ "dxi_dm", "_____no_output_____" ], [ "plt.semilogy(dxi_dm[0, :])", "_____no_output_____" ] ], [ [ "### Two step inversion:\n#### 1. Invert the moment tensor using the exact gradients", "_____no_output_____" ] ], [ [ "\"\"\" 1.1 Get the Green's function for the moment tensor:\"\"\"\nsave_path_OG = \"/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Exact_Gradient\"\nG_approx = Get_G(\n save_path=save_path_OG,\n prior_dat_folder=f_start,\n bin_file_path=bin_filepath,\n phases=phases,\n comps=comps,\n t_pres=t_pres,\n t_posts=t_posts,\n fmin=fmin,\n fmax=fmax,\n zerophase=False\n)", "_____no_output_____" ], [ "\nnp.diag(1/(sigmas**2)).shape", "_____no_output_____" ], [ "Wd = np.diag(np.ones(G_approx.shape[0])) * (1/sigmas[0]**2)", "_____no_output_____" ], [ "s_obs_dims = np.expand_dims(s_obs,axis = 1)", "_____no_output_____" ], [ "A = G_approx.T @ G_approx\nB = G_approx.T @ s_obs_dims\n\n\nM = np.linalg.solve(A, B)\n# M = np.linalg.lstsq(A, B)[0]", "_____no_output_____" ], [ "M", "_____no_output_____" ], [ "plt.figure(figsize=(16,8))\n\n# for i in range(6):\nplt.plot(G_approx[:,2])", "_____no_output_____" ], [ "st_file = \"/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Exact_Gradient/Exact_5/\"\nst_syn = Gradient.read_refl_mseeds(path=st_file, stack=False)\nplt.figure(figsize=(16,8))\n\nfor tr in st_syn:\n plt.plot(tr.times(),tr.data)", "_____no_output_____" ] ], [ [ "### 2. Invert the structure using approximate gradient", "_____no_output_____" ] ], [ [ "np.load(\"/home/nienke/Documents/Research/SS_MTI/External_packages/Test_reflectivity/Gradient_descent_classic/Update_6/X1s_0.001.npy\")", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ] ]
cbfef7ac7e7db2ca5e912b0319b20f2bcf5db75f
25,495
ipynb
Jupyter Notebook
data_explore/.ipynb_checkpoints/sklearn_classifier-checkpoint.ipynb
ubuntu733/SentencePairs
367139cdd94be36c04899c3ea01df6f58e796241
[ "Apache-2.0" ]
null
null
null
data_explore/.ipynb_checkpoints/sklearn_classifier-checkpoint.ipynb
ubuntu733/SentencePairs
367139cdd94be36c04899c3ea01df6f58e796241
[ "Apache-2.0" ]
null
null
null
data_explore/.ipynb_checkpoints/sklearn_classifier-checkpoint.ipynb
ubuntu733/SentencePairs
367139cdd94be36c04899c3ea01df6f58e796241
[ "Apache-2.0" ]
null
null
null
40.212934
287
0.59949
[ [ [ "from sklearn.model_selection import RandomizedSearchCV\nfrom sklearn.model_selection import cross_validate\nimport numpy as np\nimport xgboost as xgb\nimport pandas as pd", "_____no_output_____" ], [ "train_datasetL = pd.read_csv(\"../data/ori_data/train_process.csv\", header=None, sep=\"\\t\").iloc[:, 0].values\ndev_datasetL = pd.read_csv(\"../data/ori_data/dev_process.csv\", header=None, sep=\"\\t\").iloc[:, 0].values", "_____no_output_____" ], [ "train_datamatrix = np.load(\"../data/ori_data/train.featurematrix.data\")\ndev_datamatrix = np.load(\"../data/ori_data/dev.featurematrix.data\")", "_____no_output_____" ], [ "from sklearn.ensemble import RandomForestClassifier\n\nrandomforest_classifier = RandomForestClassifier(n_estimators=10, criterion='gini', max_depth=None, \n min_samples_split=2, min_samples_leaf=1, \n min_weight_fraction_leaf=0.0, max_features='auto', \n max_leaf_nodes=None, min_impurity_decrease=0.0, \n min_impurity_split=None, bootstrap=True, oob_score=False, n_jobs=1, \n random_state=None, verbose=0, warm_start=False, class_weight=\"balanced\")\nrandomforest_classifier.fit(train_datamatrix, train_datasetL)", "_____no_output_____" ], [ "preds = randomforest_classifier.predict(dev_datamatrix)\npred_label = preds >= 0.5\npred_label = pred_label.astype(int)\nfrom sklearn.metrics import classification_report\nprint(classification_report(dev_datasetL, pred_label))", " precision recall f1-score support\n\n 0 0.83 0.97 0.90 16751\n 1 0.52 0.14 0.22 3744\n\navg / total 0.78 0.82 0.77 20495\n\n" ], [ "from sklearn.linear_model import LogisticRegression\nlinear_classifier = LogisticRegression(class_weight=\"balanced\", max_iter=1000)\nlinear_classifier.fit(train_datamatrix, train_datasetL)", "_____no_output_____" ], [ "preds = linear_classifier.predict(dev_datamatrix)\npred_label = preds >= 0.5\npred_label = pred_label.astype(int)\nfrom sklearn.metrics import classification_report\nprint(classification_report(dev_datasetL, pred_label))", " precision recall f1-score support\n\n 0 0.90 0.68 0.78 16751\n 1 0.32 0.67 0.44 3744\n\navg / total 0.80 0.68 0.72 20495\n\n" ], [ "import pickle\npickle.dump(linear_classifier, open(\"../data/m_result/linear_classifier.model\", \"wb\"), 2)", "_____no_output_____" ], [ "# specify parameters via map\nxgb_classifier = xgb.XGBClassifier(max_depth=11, learning_rate=0.01, n_estimators=1000, \n silent=0, objective='binary:logistic', booster='gbtree', \n n_jobs=1, nthread=None, gamma=0, min_child_weight=2, max_delta_step=0, subsample=1, \n colsample_bytree=1, colsample_bylevel=1, reg_alpha=0, \n reg_lambda=1, scale_pos_weight=4, \n base_score=0.5, random_state=0, seed=None, missing=None)\n\nxgb_classifier.fit(train_datamatrix, train_datasetL)", "_____no_output_____" ], [ "# make prediction\npreds = xgb_classifier.predict(dev_datamatrix)\n\npred_label = preds >= 0.5\npred_label = pred_label.astype(int)\nfrom sklearn.metrics import classification_report\nprint(classification_report(dev_datasetL, pred_label))", " precision recall f1-score support\n\n 0 0.88 0.84 0.86 16751\n 1 0.40 0.48 0.44 3744\n\navg / total 0.79 0.78 0.78 20495\n\n" ], [ "pickle.dump(bst, open(\"../data/m_result/xgboost_3.model\", \"wb\"), 2)", "_____no_output_____" ], [ "from sklearn.neighbors import KNeighborsClassifier\n\nknn_classifier = KNeighborsClassifier(n_neighbors=5, weights='uniform', \n algorithm='auto', leaf_size=30, \n p=2, metric='minkowski', \n metric_params=None, n_jobs=1)\nknn_classifier.fit(train_datamatrix, train_datasetL)", "_____no_output_____" ], [ "# 
make prediction\npreds = knn_classifier.predict(dev_datamatrix)\n\npred_label = preds >= 0.5\npred_label = pred_label.astype(int)\nfrom sklearn.metrics import classification_report\nprint(classification_report(dev_datasetL, pred_label))", " precision recall f1-score support\n\n 0 0.83 0.94 0.89 16751\n 1 0.40 0.16 0.23 3744\n\navg / total 0.75 0.80 0.77 20495\n\n" ], [ "# select same number of pos and neg example\n\ndef select_trainset(datasetM, datasetL, neg_ratio=1):\n \n pos_index_list = []\n for index, val in enumerate(datasetL):\n if val == 1:\n pos_index_list.append(index)\n \n pos_trainM = datasetM[pos_index_list, :]\n pos_trainL = datasetL[pos_index_list]\n \n neg_index_list = [index for index in range(len(datasetL)) if index not in pos_index_list]\n \n \n neg_len = len(pos_index_list) * neg_ratio\n permuation_index = np.random.permutation(len(neg_index_list))\n neg_index_selected = permuation_index[:neg_len]\n \n neg_trainM = datasetM[neg_index_selected, :]\n neg_trainL = datasetL[neg_index_selected]\n \n return np.concatenate((pos_trainM, neg_trainM)), np.concatenate((pos_trainL, neg_trainL))", "_____no_output_____" ], [ "selected_trainM, selected_trainL = select_trainset(train_datamatrix, train_datasetL, neg_ratio=1)\n\nprint(selected_trainM.shape) \nprint(selected_trainL.shape)", "(29882, 88)\n(29882,)\n" ], [ "# specify parameters via map\nxgb_classifier = xgb.XGBClassifier(max_depth=7, learning_rate=0.1, n_estimators=1000, \n silent=0, objective='binary:logistic', booster='gbtree', \n n_jobs=1, nthread=None, gamma=0, min_child_weight=2, max_delta_step=0, subsample=1, \n colsample_bytree=1, colsample_bylevel=1, reg_alpha=0, \n reg_lambda=1, scale_pos_weight=1, \n base_score=0.5, random_state=0, seed=None, missing=None)\n\nxgb_classifier.fit(selected_trainM, selected_trainL, \n eval_set=[(selected_trainM, selected_trainL), (dev_datamatrix, dev_datasetL)],\n early_stopping_rounds=100, eval_metric=\"auc\")", "[0]\tvalidation_0-auc:0.775401\tvalidation_1-auc:0.718834\nMultiple eval metrics have been passed: 'validation_1-auc' will be used for early stopping.\n\nWill train until validation_1-auc hasn't improved in 100 
rounds.\n[1]\tvalidation_0-auc:0.783165\tvalidation_1-auc:0.727415\n[2]\tvalidation_0-auc:0.787075\tvalidation_1-auc:0.728952\n[3]\tvalidation_0-auc:0.790219\tvalidation_1-auc:0.731008\n[4]\tvalidation_0-auc:0.794188\tvalidation_1-auc:0.733995\n[5]\tvalidation_0-auc:0.796786\tvalidation_1-auc:0.735321\n[6]\tvalidation_0-auc:0.79857\tvalidation_1-auc:0.735897\n[7]\tvalidation_0-auc:0.801264\tvalidation_1-auc:0.737186\n[8]\tvalidation_0-auc:0.802788\tvalidation_1-auc:0.737104\n[9]\tvalidation_0-auc:0.804593\tvalidation_1-auc:0.73783\n[10]\tvalidation_0-auc:0.805867\tvalidation_1-auc:0.738461\n[11]\tvalidation_0-auc:0.807701\tvalidation_1-auc:0.739517\n[12]\tvalidation_0-auc:0.808946\tvalidation_1-auc:0.740406\n[13]\tvalidation_0-auc:0.810856\tvalidation_1-auc:0.740869\n[14]\tvalidation_0-auc:0.813216\tvalidation_1-auc:0.741451\n[15]\tvalidation_0-auc:0.814749\tvalidation_1-auc:0.74203\n[16]\tvalidation_0-auc:0.816466\tvalidation_1-auc:0.742224\n[17]\tvalidation_0-auc:0.817764\tvalidation_1-auc:0.742711\n[18]\tvalidation_0-auc:0.819885\tvalidation_1-auc:0.743076\n[19]\tvalidation_0-auc:0.821756\tvalidation_1-auc:0.743363\n[20]\tvalidation_0-auc:0.82311\tvalidation_1-auc:0.743783\n[21]\tvalidation_0-auc:0.824243\tvalidation_1-auc:0.743945\n[22]\tvalidation_0-auc:0.825347\tvalidation_1-auc:0.744339\n[23]\tvalidation_0-auc:0.826601\tvalidation_1-auc:0.744607\n[24]\tvalidation_0-auc:0.827962\tvalidation_1-auc:0.744886\n[25]\tvalidation_0-auc:0.829376\tvalidation_1-auc:0.744901\n[26]\tvalidation_0-auc:0.830542\tvalidation_1-auc:0.745246\n[27]\tvalidation_0-auc:0.831513\tvalidation_1-auc:0.745402\n[28]\tvalidation_0-auc:0.833043\tvalidation_1-auc:0.745661\n[29]\tvalidation_0-auc:0.833811\tvalidation_1-auc:0.7456\n[30]\tvalidation_0-auc:0.834992\tvalidation_1-auc:0.745668\n[31]\tvalidation_0-auc:0.836206\tvalidation_1-auc:0.745617\n[32]\tvalidation_0-auc:0.837281\tvalidation_1-auc:0.745663\n[33]\tvalidation_0-auc:0.838158\tvalidation_1-auc:0.745486\n[34]\tvalidation_0-auc:0.839253\tvalidation_1-auc:0.745464\n[35]\tvalidation_0-auc:0.84004\tvalidation_1-auc:0.745529\n[36]\tvalidation_0-auc:0.841688\tvalidation_1-auc:0.745628\n[37]\tvalidation_0-auc:0.842579\tvalidation_1-auc:0.745739\n[38]\tvalidation_0-auc:0.84376\tvalidation_1-auc:0.746009\n[39]\tvalidation_0-auc:0.844589\tvalidation_1-auc:0.746067\n[40]\tvalidation_0-auc:0.845488\tvalidation_1-auc:0.746098\n[41]\tvalidation_0-auc:0.846669\tvalidation_1-auc:0.746428\n[42]\tvalidation_0-auc:0.847253\tvalidation_1-auc:0.746575\n[43]\tvalidation_0-auc:0.847934\tvalidation_1-auc:0.746567\n[44]\tvalidation_0-auc:0.849411\tvalidation_1-auc:0.746765\n[45]\tvalidation_0-auc:0.850004\tvalidation_1-auc:0.746704\n[46]\tvalidation_0-auc:0.850954\tvalidation_1-auc:0.746762\n[47]\tvalidation_0-auc:0.852005\tvalidation_1-auc:0.746786\n[48]\tvalidation_0-auc:0.852651\tvalidation_1-auc:0.74686\n[49]\tvalidation_0-auc:0.85297\tvalidation_1-auc:0.746896\n[50]\tvalidation_0-auc:0.853991\tvalidation_1-auc:0.746777\n[51]\tvalidation_0-auc:0.854521\tvalidation_1-auc:0.746862\n[52]\tvalidation_0-auc:0.85493\tvalidation_1-auc:0.746867\n[53]\tvalidation_0-auc:0.856394\tvalidation_1-auc:0.746851\n[54]\tvalidation_0-auc:0.857076\tvalidation_1-auc:0.746836\n[55]\tvalidation_0-auc:0.858238\tvalidation_1-auc:0.746718\n[56]\tvalidation_0-auc:0.858373\tvalidation_1-auc:0.746741\n[57]\tvalidation_0-auc:0.85918\tvalidation_1-auc:0.746666\n[58]\tvalidation_0-auc:0.860837\tvalidation_1-auc:0.746592\n[59]\tvalidation_0-auc:0.861073\tvalidation_1-auc:0.746594\n[60]\tvalidation_0-auc:0.86
139\tvalidation_1-auc:0.746576\n[61]\tvalidation_0-auc:0.862258\tvalidation_1-auc:0.746442\n[62]\tvalidation_0-auc:0.862731\tvalidation_1-auc:0.746344\n[63]\tvalidation_0-auc:0.86366\tvalidation_1-auc:0.746462\n[64]\tvalidation_0-auc:0.864399\tvalidation_1-auc:0.746353\n[65]\tvalidation_0-auc:0.865072\tvalidation_1-auc:0.746441\n[66]\tvalidation_0-auc:0.865733\tvalidation_1-auc:0.746522\n[67]\tvalidation_0-auc:0.86622\tvalidation_1-auc:0.746463\n[68]\tvalidation_0-auc:0.866435\tvalidation_1-auc:0.746527\n[69]\tvalidation_0-auc:0.867475\tvalidation_1-auc:0.746469\n[70]\tvalidation_0-auc:0.867699\tvalidation_1-auc:0.746438\n[71]\tvalidation_0-auc:0.868432\tvalidation_1-auc:0.746332\n[72]\tvalidation_0-auc:0.868669\tvalidation_1-auc:0.746312\n[73]\tvalidation_0-auc:0.869229\tvalidation_1-auc:0.746321\n[74]\tvalidation_0-auc:0.869645\tvalidation_1-auc:0.746385\n[75]\tvalidation_0-auc:0.87021\tvalidation_1-auc:0.746187\n[76]\tvalidation_0-auc:0.871386\tvalidation_1-auc:0.746173\n[77]\tvalidation_0-auc:0.871867\tvalidation_1-auc:0.746223\n[78]\tvalidation_0-auc:0.872598\tvalidation_1-auc:0.746156\n[79]\tvalidation_0-auc:0.872669\tvalidation_1-auc:0.746183\n[80]\tvalidation_0-auc:0.873707\tvalidation_1-auc:0.74626\n[81]\tvalidation_0-auc:0.873803\tvalidation_1-auc:0.746231\n[82]\tvalidation_0-auc:0.873925\tvalidation_1-auc:0.746172\n[83]\tvalidation_0-auc:0.874423\tvalidation_1-auc:0.746065\n[84]\tvalidation_0-auc:0.875459\tvalidation_1-auc:0.745961\n[85]\tvalidation_0-auc:0.876655\tvalidation_1-auc:0.746064\n[86]\tvalidation_0-auc:0.877957\tvalidation_1-auc:0.746068\n[87]\tvalidation_0-auc:0.878203\tvalidation_1-auc:0.74603\n[88]\tvalidation_0-auc:0.879098\tvalidation_1-auc:0.746078\n[89]\tvalidation_0-auc:0.879307\tvalidation_1-auc:0.746033\n[90]\tvalidation_0-auc:0.879613\tvalidation_1-auc:0.746051\n[91]\tvalidation_0-auc:0.880505\tvalidation_1-auc:0.74618\n[92]\tvalidation_0-auc:0.881457\tvalidation_1-auc:0.746253\n[93]\tvalidation_0-auc:0.881937\tvalidation_1-auc:0.746425\n[94]\tvalidation_0-auc:0.882173\tvalidation_1-auc:0.746444\n[95]\tvalidation_0-auc:0.882326\tvalidation_1-auc:0.746396\n[96]\tvalidation_0-auc:0.882498\tvalidation_1-auc:0.74642\n[97]\tvalidation_0-auc:0.883239\tvalidation_1-auc:0.746381\n[98]\tvalidation_0-auc:0.883471\tvalidation_1-auc:0.746249\n[99]\tvalidation_0-auc:0.883845\tvalidation_1-auc:0.746098\n[100]\tvalidation_0-auc:0.884082\tvalidation_1-auc:0.746095\n[101]\tvalidation_0-auc:0.884163\tvalidation_1-auc:0.746031\n[102]\tvalidation_0-auc:0.88422\tvalidation_1-auc:0.746029\n[103]\tvalidation_0-auc:0.884906\tvalidation_1-auc:0.746043\n[104]\tvalidation_0-auc:0.885747\tvalidation_1-auc:0.74577\n[105]\tvalidation_0-auc:0.8863\tvalidation_1-auc:0.745881\n[106]\tvalidation_0-auc:0.887298\tvalidation_1-auc:0.745885\n[107]\tvalidation_0-auc:0.88781\tvalidation_1-auc:0.745889\n[108]\tvalidation_0-auc:0.888821\tvalidation_1-auc:0.745904\n[109]\tvalidation_0-auc:0.889342\tvalidation_1-auc:0.745814\n[110]\tvalidation_0-auc:0.889943\tvalidation_1-auc:0.745897\n[111]\tvalidation_0-auc:0.891229\tvalidation_1-auc:0.745459\n[112]\tvalidation_0-auc:0.892381\tvalidation_1-auc:0.745423\n[113]\tvalidation_0-auc:0.89251\tvalidation_1-auc:0.745353\n[114]\tvalidation_0-auc:0.892645\tvalidation_1-auc:0.745344\n[115]\tvalidation_0-auc:0.892947\tvalidation_1-auc:0.745313\n[116]\tvalidation_0-auc:0.893612\tvalidation_1-auc:0.745288\n[117]\tvalidation_0-auc:0.894667\tvalidation_1-auc:0.745294\n[118]\tvalidation_0-auc:0.894853\tvalidation_1-auc:0.745262\n[119]\tvalidation_0-auc:0.894985\tval
idation_1-auc:0.74526\n[120]\tvalidation_0-auc:0.895107\tvalidation_1-auc:0.745287\n[121]\tvalidation_0-auc:0.895787\tvalidation_1-auc:0.745386\n[122]\tvalidation_0-auc:0.896611\tvalidation_1-auc:0.745496\n[123]\tvalidation_0-auc:0.897448\tvalidation_1-auc:0.74565\n[124]\tvalidation_0-auc:0.898428\tvalidation_1-auc:0.745659\n[125]\tvalidation_0-auc:0.899063\tvalidation_1-auc:0.745469\n[126]\tvalidation_0-auc:0.89982\tvalidation_1-auc:0.74554\n[127]\tvalidation_0-auc:0.899973\tvalidation_1-auc:0.745495\n[128]\tvalidation_0-auc:0.900056\tvalidation_1-auc:0.745526\n[129]\tvalidation_0-auc:0.900175\tvalidation_1-auc:0.745505\n[130]\tvalidation_0-auc:0.90042\tvalidation_1-auc:0.745481\n[131]\tvalidation_0-auc:0.90069\tvalidation_1-auc:0.745502\n[132]\tvalidation_0-auc:0.901315\tvalidation_1-auc:0.745226\n[133]\tvalidation_0-auc:0.901657\tvalidation_1-auc:0.745113\n[134]\tvalidation_0-auc:0.901722\tvalidation_1-auc:0.745137\n[135]\tvalidation_0-auc:0.902804\tvalidation_1-auc:0.745073\n[136]\tvalidation_0-auc:0.903429\tvalidation_1-auc:0.745049\n[137]\tvalidation_0-auc:0.903879\tvalidation_1-auc:0.744799\n[138]\tvalidation_0-auc:0.904009\tvalidation_1-auc:0.744754\n[139]\tvalidation_0-auc:0.904312\tvalidation_1-auc:0.744603\n[140]\tvalidation_0-auc:0.904664\tvalidation_1-auc:0.744552\n[141]\tvalidation_0-auc:0.90506\tvalidation_1-auc:0.744479\n[142]\tvalidation_0-auc:0.905558\tvalidation_1-auc:0.744362\n[143]\tvalidation_0-auc:0.905725\tvalidation_1-auc:0.744383\n[144]\tvalidation_0-auc:0.906095\tvalidation_1-auc:0.744262\n[145]\tvalidation_0-auc:0.90618\tvalidation_1-auc:0.744225\n[146]\tvalidation_0-auc:0.906627\tvalidation_1-auc:0.744335\n[147]\tvalidation_0-auc:0.907566\tvalidation_1-auc:0.744205\n[148]\tvalidation_0-auc:0.90808\tvalidation_1-auc:0.744276\n[149]\tvalidation_0-auc:0.908778\tvalidation_1-auc:0.744196\nStopping. Best iteration:\n[49]\tvalidation_0-auc:0.85297\tvalidation_1-auc:0.746896\n\n" ], [ "# make prediction\npreds = xgb_classifier.predict(dev_datamatrix)\n\npred_label = preds >= 0.5\npred_label = pred_label.astype(int)\nfrom sklearn.metrics import classification_report\nprint(classification_report(dev_datasetL, pred_label))", "/home/xueyunzhe/anaconda3/lib/python3.5/site-packages/sklearn/preprocessing/label.py:151: DeprecationWarning: The truth value of an empty array is ambiguous. Returning False, but in future this will result in an error. Use `array.size > 0` to check that an array is not empty.\n if diff:\n" ], [ "pickle.dump(bst, open(\"../data/m_result/1_1_ratio_xgboost.model\", \"wb\"), 2)", "_____no_output_____" ] ] ]
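The `select_trainset` helper in the cells above builds its negative sample with a Python membership test (`index not in pos_index_list`), which is quadratic in the dataset size, and it then indexes the full matrix with a permutation of `range(len(neg_index_list))` rather than with the negative row indices themselves, so the rows it draws as "negatives" can in fact include positives. The sketch below is one way the same undersampling step could be written with NumPy; the function and argument names mirror the notebook, and the reading of its intent (draw `neg_ratio` negatives per positive) is an assumption on my part.

```python
import numpy as np

def select_trainset(datasetM, datasetL, neg_ratio=1):
    """Undersample the negative class to about neg_ratio times the number of positives."""
    pos_idx = np.where(datasetL == 1)[0]                  # row indices of positive examples
    neg_idx = np.where(datasetL != 1)[0]                  # row indices of negative examples
    n_neg = min(len(pos_idx) * neg_ratio, len(neg_idx))   # how many negatives to keep
    drawn_neg = np.random.permutation(neg_idx)[:n_neg]    # sample genuine negative rows
    keep = np.concatenate((pos_idx, drawn_neg))
    return datasetM[keep], datasetL[keep]
```

Called as `select_trainset(train_datamatrix, train_datasetL, neg_ratio=1)`, it returns the same kind of balanced `(matrix, labels)` pair whose shapes the notebook prints before refitting XGBoost.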
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
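Both XGBoost save cells in this record call `pickle.dump(bst, ...)`, but `bst` is never defined in the visible code; presumably the fitted `xgb_classifier` is meant, following the same pattern as the cell that saves `linear_classifier`. A minimal sketch under that assumption, keeping the notebook's paths and pickle protocol:

```python
import pickle

# Save the fitted sklearn-API booster (assumes the `xgb_classifier` trained above).
with open("../data/m_result/xgboost_3.model", "wb") as f:
    pickle.dump(xgb_classifier, f, 2)        # protocol 2, as in the linear_classifier cell

# Reload it later and score the dev feature matrix.
with open("../data/m_result/xgboost_3.model", "rb") as f:
    restored_classifier = pickle.load(f)
dev_preds = restored_classifier.predict(dev_datamatrix)
```

The same substitution would apply to the `1_1_ratio_xgboost.model` cell at the end of the record.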
cbff1698f4930afd2d79b2bc717ce5c6c08ffc8e
28,291
ipynb
Jupyter Notebook
docs/deWall.ipynb
luigibvl/AlphaShapes.jl
f073366f8ab6b15964c9879dad5ccedddd60f2a1
[ "MIT" ]
null
null
null
docs/deWall.ipynb
luigibvl/AlphaShapes.jl
f073366f8ab6b15964c9879dad5ccedddd60f2a1
[ "MIT" ]
null
null
null
docs/deWall.ipynb
luigibvl/AlphaShapes.jl
f073366f8ab6b15964c9879dad5ccedddd60f2a1
[ "MIT" ]
null
null
null
35.275561
188
0.511576
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
cbff4916b1b074d5eadf5a4b7511a0b588a250b9
17,762
ipynb
Jupyter Notebook
notebooks/basic_motion/basic_motion.ipynb
Ryan-ZL-Lin/jetbot-customization
5ce8619e049c53c83d2197678023f254b7c5fa48
[ "MIT" ]
1
2021-04-29T15:18:58.000Z
2021-04-29T15:18:58.000Z
notebooks/basic_motion/basic_motion.ipynb
Ryan-ZL-Lin/jetbot-customization
5ce8619e049c53c83d2197678023f254b7c5fa48
[ "MIT" ]
null
null
null
notebooks/basic_motion/basic_motion.ipynb
Ryan-ZL-Lin/jetbot-customization
5ce8619e049c53c83d2197678023f254b7c5fa48
[ "MIT" ]
null
null
null
31.661319
468
0.601678
[ [ [ "# Basic Motion\n\nWelcome to JetBot's browser based programming interface! This document is\ncalled a *Jupyter Notebook*, which combines text, code, and graphic\ndisplay all in one! Prett neat, huh? If you're unfamiliar with *Jupyter* we suggest clicking the \n``Help`` drop down menu in the top toolbar. This has useful references for\nprogramming with *Jupyter*. \n\nIn this notebook, we'll cover the basics of controlling JetBot. \n\n### Importing the Robot class\n\nTo get started programming JetBot, we'll need to import the ``Robot`` class. This class\nallows us to easily control the robot's motors! This is contained in the ``jetbot`` package.\n\n> If you're new to Python, a *package* is essentially a folder containing \n> code files. These code files are called *modules*.\n\nTo import the ``Robot`` class, highlight the cell below and press ``ctrl + enter`` or the ``play`` icon above.\nThis will execute the code contained in the cell", "_____no_output_____" ] ], [ [ "from jetbot import Robot", "_____no_output_____" ] ], [ [ "Now that we've imported the ``Robot`` class we can initialize the class *instance* as follows. ", "_____no_output_____" ] ], [ [ "robot = Robot()", "_____no_output_____" ] ], [ [ "### Commanding the robot", "_____no_output_____" ], [ "Now that we've created our ``Robot`` instance we named \"robot\", we can use this instance\nto control the robot. To make the robot spin counterclockwise at 30% of it's max speed\nwe can call the following\n\n> WARNING: This next command will make the robot move! Please make sure the robot has clearance.", "_____no_output_____" ] ], [ [ "robot.left(speed=0.3)", "_____no_output_____" ] ], [ [ "Cool, you should see the robot spin counterclockwise!\n\n> If your robot didn't turn left, that means one of the motors is wired backwards! Try powering down your\n> robot and swapping the terminals that the ``red`` and ``black`` cables of the incorrect motor.\n> \n> REMINDER: Always be careful to check your wiring, and don't change the wiring on a running system!\n\nNow, to stop the robot you can call the ``stop`` method.", "_____no_output_____" ] ], [ [ "robot.stop()", "_____no_output_____" ] ], [ [ "Maybe we only want to run the robot for a set period of time. For that, we can use the Python ``time`` package. ", "_____no_output_____" ] ], [ [ "import time", "_____no_output_____" ] ], [ [ "This package defines the ``sleep`` function, which causes the code execution to block for the specified number of seconds\nbefore running the next command. Try the following to make the robot turn left only for half a second.", "_____no_output_____" ] ], [ [ "robot.left(0.3)\ntime.sleep(0.5)\nrobot.stop()", "_____no_output_____" ] ], [ [ "Great. You should see the robot turn left for a bit and then stop.\n\n> Wondering what happened to the ``speed=`` inside the ``left`` method? Python allows \n> us to set function parameters by either their name, or the order that they are defined\n> (without specifying the name).\n\nThe ``BasicJetbot`` class also has the methods ``right``, ``forward``, and ``backwards``. Try creating your own cell to make\nthe robot move forward at 50% speed for one second.\n\nCreate a new cell by highlighting an existing cell and pressing ``b`` or the ``+`` icon above. Once you've done that, type in the code that you think will make the robot move forward at 50% speed for one second.", "_____no_output_____" ], [ "### Controlling motors individually\n\nAbove we saw how we can control the robot using commands like ``left``, ``right``, etc. 
But what if we want to set each motor speed \nindividually? Well, there are two ways you can do this\n\nThe first way is to call the ``set_motors`` method. For example, to turn along a left arch for a second we could set the left motor to 30% and the right motor to 60% like follows.", "_____no_output_____" ] ], [ [ "robot.set_motors(0.3, 0.6)\ntime.sleep(1.0)\nrobot.stop()", "_____no_output_____" ] ], [ [ "Great! You should see the robot move along a left arch. But actually, there's another way that we could accomplish the same thing.\n\nThe ``Robot`` class has two attributes named ``left_motor`` and ``right_motor`` that represent each motor individually.\nThese attributes are ``Motor`` class instances, each which contains a ``value`` attribute. This ``value`` attribute\nis a [traitlet](https://github.com/ipython/traitlets) which generates ``events`` when assigned a new value. In the motor\nclass, we attach a function that updates the motor commands whenever the value changes.\n\nSo, to accomplish the exact same thing we did above, we could execute the following.", "_____no_output_____" ] ], [ [ "robot.left_motor.value = 0.34\nrobot.left_motor.alpha = 0.9\nrobot.right_motor.value = 0.34\nrobot.right_motor.alpha = 0.81\n\ntime.sleep(3)\nrobot.left_motor.value = 0.0\nrobot.right_motor.value = 0.0", "_____no_output_____" ] ], [ [ "You should see the robot move in the same exact way!", "_____no_output_____" ], [ "### Link motors to traitlets", "_____no_output_____" ], [ "A really cool feature about these [traitlets](https://github.com/ipython/traitlets) is that we can \nalso link them to other traitlets! This is super handy because Jupyter Notebooks allow us\nto make graphical ``widgets`` that use traitlets under the hood. This means we can attach\nour motors to ``widgets`` to control them from the browser, or just visualize the value.\n\nTo show how to do this, let's create and display two sliders that we'll use to control our motors.", "_____no_output_____" ] ], [ [ "import ipywidgets.widgets as widgets\nfrom IPython.display import display\n\n# create two sliders with range [-1.0, 1.0]\nleft_slider = widgets.FloatSlider(description='left', min=-1.0, max=1.0, step=0.01, orientation='vertical')\nright_slider = widgets.FloatSlider(description='right', min=-1.0, max=1.0, step=0.01, orientation='vertical')\n\n# create a horizontal box container to place the sliders next to eachother\nslider_container = widgets.HBox([left_slider, right_slider])\n\n# display the container in this cell's output\ndisplay(slider_container)", "_____no_output_____" ] ], [ [ "You should see two ``vertical`` sliders displayed above. \n\n> HELPFUL TIP: In Jupyter Lab, you can actually \"pop\" the output of cells into entirely separate window! It will still be \n> connected to the notebook, but displayed separately. This is helpful if we want to pin the output of code we executed elsewhere.\n> To do this, right click the output of the cell and select ``Create New View for Output``. You can then drag the new window\n> to a location you find pleasing.\n\nTry clicking and dragging the sliders up and down. Notice nothing happens when we move the sliders currently. That's because we haven't connected them to motors yet! 
We'll do that by using the ``link`` function from the traitlets package.", "_____no_output_____" ] ], [ [ "import traitlets\n\nleft_link = traitlets.link((left_slider, 'value'), (robot.left_motor, 'value'))\nright_link = traitlets.link((right_slider, 'value'), (robot.right_motor, 'value'))", "_____no_output_____" ] ], [ [ "Now try dragging the sliders (slowly at first). You should see the respective motor turn!\n\nThe ``link`` function that we created above actually creates a bi-directional link! That means,\nif we set the motor values elsewhere, the sliders will update! Try executing the code block below", "_____no_output_____" ] ], [ [ "robot.forward(0.3)\ntime.sleep(0.5)\nrobot.stop()", "_____no_output_____" ] ], [ [ "You should see the sliders respond to the motor commands! If we want to remove this connection we can call the\n``unlink`` method of each link.", "_____no_output_____" ] ], [ [ "left_link.unlink()\nright_link.unlink()", "_____no_output_____" ] ], [ [ "But what if we don't want a *bi-directional* link, let's say we only want to use the sliders to display the motor values,\nbut not control them. For that we can use the ``dlink`` function. The left input is the ``source`` and the right input is the ``target``", "_____no_output_____" ] ], [ [ "left_link = traitlets.dlink((robot.left_motor, 'value'), (left_slider, 'value'))\nright_link = traitlets.dlink((robot.right_motor, 'value'), (right_slider, 'value'))", "_____no_output_____" ] ], [ [ "Now try moving the sliders. You should see that the robot doesn't respond. But when set the motors using a different method,\nthe sliders will update and display the value!", "_____no_output_____" ], [ "### Attach functions to events", "_____no_output_____" ], [ "Another way to use traitlets, is by attaching functions (like ``forward``) to events. These\nfunctions will get called whenever a change to the object occurs, and will be passed some information about that change\nlike the ``old`` value and the ``new`` value. \n\nLet's create and display some buttons that we'll use to control the robot.", "_____no_output_____" ] ], [ [ "# create buttons\nbutton_layout = widgets.Layout(width='100px', height='80px', align_self='center')\nstop_button = widgets.Button(description='stop', button_style='danger', layout=button_layout)\nforward_button = widgets.Button(description='forward', layout=button_layout)\nbackward_button = widgets.Button(description='backward', layout=button_layout)\nleft_button = widgets.Button(description='left', layout=button_layout)\nright_button = widgets.Button(description='right', layout=button_layout)\n\n# display buttons\nmiddle_box = widgets.HBox([left_button, stop_button, right_button], layout=widgets.Layout(align_self='center'))\ncontrols_box = widgets.VBox([forward_button, middle_box, backward_button])\ndisplay(controls_box)", "_____no_output_____" ] ], [ [ "You should see a set of robot controls displayed above! But right now they wont do anything. To do that\nwe'll need to create some functions that we'll attach to the button's ``on_click`` event. 
", "_____no_output_____" ] ], [ [ "def stop(change):\n robot.stop()\n \ndef step_forward(change):\n robot.forward(0.3)\n time.sleep(0.5)\n robot.stop()\n\ndef step_backward(change):\n robot.backward(0.3)\n time.sleep(0.5)\n robot.stop()\n\ndef step_left(change):\n robot.left(0.3)\n time.sleep(0.5)\n robot.stop()\n\ndef step_right(change):\n robot.right(0.3)\n time.sleep(0.5)\n robot.stop()", "_____no_output_____" ] ], [ [ "Now that we've defined the functions, let's attach them to the on-click events of each button", "_____no_output_____" ] ], [ [ "# link buttons to actions\nstop_button.on_click(stop)\nforward_button.on_click(step_forward)\nbackward_button.on_click(step_backward)\nleft_button.on_click(step_left)\nright_button.on_click(step_right)", "_____no_output_____" ] ], [ [ "Now when you click each button, you should see the robot move!", "_____no_output_____" ], [ "### Heartbeat Killswitch\n\nHere we show how to connect a 'heartbeat' to stop the robot from moving. This is a simple way to detect if the robot connection is alive. You can lower the slider below to reduce the period (in seconds) of the heartbeat. If a round-trip communication between broswer cannot be made within two heartbeats, the '`status`' attribute of the heartbeat will be set ``dead``. As soon as the connection is restored, the ``status`` attribute will return to ``alive``.", "_____no_output_____" ] ], [ [ "from jetbot import Heartbeat\n\nheartbeat = Heartbeat()\n\n# this function will be called when heartbeat 'alive' status changes\ndef handle_heartbeat_status(change):\n if change['new'] == Heartbeat.Status.dead:\n robot.stop()\n \nheartbeat.observe(handle_heartbeat_status, names='status')\n\nperiod_slider = widgets.FloatSlider(description='period', min=0.001, max=0.5, step=0.01, value=0.5)\ntraitlets.dlink((period_slider, 'value'), (heartbeat, 'period'))\n\ndisplay(period_slider, heartbeat.pulseout)", "_____no_output_____" ] ], [ [ "Try executing the code below to start the motors, and then lower the slider to see what happens. You can also try disconnecting your robot or PC.", "_____no_output_____" ] ], [ [ "robot.left(0.2) \n\n# now lower the `period` slider above until the network heartbeat can't be satisfied", "_____no_output_____" ] ], [ [ "### Conclusion\n\nThat's it for this example notebook! Hopefully you feel confident that you can program your robot to move around now :)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
cbff4e7e4c7cacfdd139832e8b88b4cfac94719a
122,639
ipynb
Jupyter Notebook
ch14/ch14_part2.ipynb
snapbuy/080223
3a41f35ba9a616e9c7ba5eee34791f224d9ec4b2
[ "MIT" ]
16
2021-05-12T02:18:03.000Z
2022-02-09T05:29:18.000Z
ch14/ch14_part2.ipynb
snapbuy/080223
3a41f35ba9a616e9c7ba5eee34791f224d9ec4b2
[ "MIT" ]
null
null
null
ch14/ch14_part2.ipynb
snapbuy/080223
3a41f35ba9a616e9c7ba5eee34791f224d9ec4b2
[ "MIT" ]
21
2021-06-04T06:14:28.000Z
2022-03-29T07:40:03.000Z
47.719455
846
0.592666
[ [ [ "# 머신 러닝 교과서 3판", "_____no_output_____" ], [ "# 14장 - 텐서플로의 구조 자세히 알아보기 (2/3)", "_____no_output_____" ], [ "**아래 링크를 통해 이 노트북을 주피터 노트북 뷰어(nbviewer.jupyter.org)로 보거나 구글 코랩(colab.research.google.com)에서 실행할 수 있습니다.**\n\n<table class=\"tfo-notebook-buttons\" align=\"left\">\n <td>\n <a target=\"_blank\" href=\"https://nbviewer.jupyter.org/github/rickiepark/python-machine-learning-book-3rd-edition/blob/master/ch14/ch14_part2.ipynb\"><img src=\"https://jupyter.org/assets/main-logo.svg\" width=\"28\" />주피터 노트북 뷰어로 보기</a>\n </td>\n <td>\n <a target=\"_blank\" href=\"https://colab.research.google.com/github/rickiepark/python-machine-learning-book-3rd-edition/blob/master/ch14/ch14_part2.ipynb\"><img src=\"https://www.tensorflow.org/images/colab_logo_32px.png\" />구글 코랩(Colab)에서 실행하기</a>\n </td>\n</table>", "_____no_output_____" ], [ "### 목차", "_____no_output_____" ], [ "- 텐서플로 추정기\n - 특성 열 사용하기\n - 사전에 준비된 추정기로 머신 러닝 수행하기", "_____no_output_____" ] ], [ [ "import numpy as np\nimport tensorflow as tf\nimport pandas as pd\n\nfrom IPython.display import Image", "_____no_output_____" ], [ "tf.__version__", "_____no_output_____" ] ], [ [ "## 텐서플로 추정기\n\n##### 사전에 준비된 추정기 사용하는 단계\n\n * **단계 1:** 데이터 로딩을 위해 입력 함수 정의하기\n * **단계 2:** 추정기와 데이터 사이를 연결하기 위해 특성 열 정의하기\n * **단계 3:** 추정기 객체를 만들거나 케라스 모델을 추정기로 바꾸기\n * **단계 4:** 추정기 사용하기: train() evaluate() predict() ", "_____no_output_____" ] ], [ [ "tf.random.set_seed(1)\nnp.random.seed(1)", "_____no_output_____" ] ], [ [ "### 특성 열 사용하기\n\n\n * 정의: https://developers.google.com/machine-learning/glossary/#feature_columns\n * 문서: https://www.tensorflow.org/api_docs/python/tf/feature_column", "_____no_output_____" ] ], [ [ "Image(url='https://git.io/JL56E', width=700)", "_____no_output_____" ], [ "dataset_path = tf.keras.utils.get_file(\"auto-mpg.data\", \n (\"http://archive.ics.uci.edu/ml/machine-learning-databases\"\n \"/auto-mpg/auto-mpg.data\"))\n\ncolumn_names = ['MPG', 'Cylinders', 'Displacement', 'Horsepower',\n 'Weight', 'Acceleration', 'ModelYear', 'Origin']\n\ndf = pd.read_csv(dataset_path, names=column_names,\n na_values = \"?\", comment='\\t',\n sep=\" \", skipinitialspace=True)\n\ndf.tail()", "Downloading data from http://archive.ics.uci.edu/ml/machine-learning-databases/auto-mpg/auto-mpg.data\n32768/30286 [================================] - 0s 1us/step\n" ], [ "print(df.isna().sum())\n\ndf = df.dropna()\ndf = df.reset_index(drop=True)\ndf.tail()", "MPG 0\nCylinders 0\nDisplacement 0\nHorsepower 6\nWeight 0\nAcceleration 0\nModelYear 0\nOrigin 0\ndtype: int64\n" ], [ "import sklearn\nimport sklearn.model_selection\n\n\ndf_train, df_test = sklearn.model_selection.train_test_split(df, train_size=0.8)\ntrain_stats = df_train.describe().transpose()\ntrain_stats", "_____no_output_____" ], [ "numeric_column_names = ['Cylinders', 'Displacement', 'Horsepower', 'Weight', 'Acceleration']\n\ndf_train_norm, df_test_norm = df_train.copy(), df_test.copy()\n\nfor col_name in numeric_column_names:\n mean = train_stats.loc[col_name, 'mean']\n std = train_stats.loc[col_name, 'std']\n df_train_norm.loc[:, col_name] = (df_train_norm.loc[:, col_name] - mean)/std\n df_test_norm.loc[:, col_name] = (df_test_norm.loc[:, col_name] - mean)/std\n \ndf_train_norm.tail()", "_____no_output_____" ] ], [ [ "#### 수치형 열", "_____no_output_____" ] ], [ [ "numeric_features = []\n\nfor col_name in numeric_column_names:\n numeric_features.append(tf.feature_column.numeric_column(key=col_name))\n \nnumeric_features", "_____no_output_____" ], [ "feature_year = 
tf.feature_column.numeric_column(key=\"ModelYear\")\n\nbucketized_features = []\n\nbucketized_features.append(tf.feature_column.bucketized_column(\n source_column=feature_year,\n boundaries=[73, 76, 79]))\n\nprint(bucketized_features)", "[BucketizedColumn(source_column=NumericColumn(key='ModelYear', shape=(1,), default_value=None, dtype=tf.float32, normalizer_fn=None), boundaries=(73, 76, 79))]\n" ], [ "feature_origin = tf.feature_column.categorical_column_with_vocabulary_list(\n key='Origin',\n vocabulary_list=[1, 2, 3])\n\ncategorical_indicator_features = []\ncategorical_indicator_features.append(tf.feature_column.indicator_column(feature_origin))\n\nprint(categorical_indicator_features)", "[IndicatorColumn(categorical_column=VocabularyListCategoricalColumn(key='Origin', vocabulary_list=(1, 2, 3), dtype=tf.int64, default_value=-1, num_oov_buckets=0))]\n" ] ], [ [ "### 사전에 준비된 추정기로 머신러닝 수행하기", "_____no_output_____" ] ], [ [ "def train_input_fn(df_train, batch_size=8):\n df = df_train.copy()\n train_x, train_y = df, df.pop('MPG')\n dataset = tf.data.Dataset.from_tensor_slices((dict(train_x), train_y))\n\n # 셔플, 반복, 배치\n return dataset.shuffle(1000).repeat().batch(batch_size)\n\n## 조사\nds = train_input_fn(df_train_norm)\nbatch = next(iter(ds))\nprint('키:', batch[0].keys())\nprint('ModelYear:', batch[0]['ModelYear'])", "키: dict_keys(['Cylinders', 'Displacement', 'Horsepower', 'Weight', 'Acceleration', 'ModelYear', 'Origin'])\nModelYear: tf.Tensor([82 78 76 72 78 73 70 78], shape=(8,), dtype=int64)\n" ], [ "all_feature_columns = (numeric_features + \n bucketized_features + \n categorical_indicator_features)\n\nprint(all_feature_columns)", "[NumericColumn(key='Cylinders', shape=(1,), default_value=None, dtype=tf.float32, normalizer_fn=None), NumericColumn(key='Displacement', shape=(1,), default_value=None, dtype=tf.float32, normalizer_fn=None), NumericColumn(key='Horsepower', shape=(1,), default_value=None, dtype=tf.float32, normalizer_fn=None), NumericColumn(key='Weight', shape=(1,), default_value=None, dtype=tf.float32, normalizer_fn=None), NumericColumn(key='Acceleration', shape=(1,), default_value=None, dtype=tf.float32, normalizer_fn=None), BucketizedColumn(source_column=NumericColumn(key='ModelYear', shape=(1,), default_value=None, dtype=tf.float32, normalizer_fn=None), boundaries=(73, 76, 79)), IndicatorColumn(categorical_column=VocabularyListCategoricalColumn(key='Origin', vocabulary_list=(1, 2, 3), dtype=tf.int64, default_value=-1, num_oov_buckets=0))]\n" ], [ "regressor = tf.estimator.DNNRegressor(\n feature_columns=all_feature_columns,\n hidden_units=[32, 10],\n model_dir='models/autompg-dnnregressor/')", "INFO:tensorflow:Using default config.\nINFO:tensorflow:Using config: {'_model_dir': 'models/autompg-dnnregressor/', '_tf_random_seed': None, '_save_summary_steps': 100, '_save_checkpoints_steps': None, '_save_checkpoints_secs': 600, '_session_config': allow_soft_placement: true\ngraph_options {\n rewrite_options {\n meta_optimizer_iterations: ONE\n }\n}\n, '_keep_checkpoint_max': 5, '_keep_checkpoint_every_n_hours': 10000, '_log_step_count_steps': 100, '_train_distribute': None, '_device_fn': None, '_protocol': None, '_eval_distribute': None, '_experimental_distribute': None, '_experimental_max_worker_delay_secs': None, '_session_creation_timeout_secs': 7200, '_checkpoint_save_graph_def': True, '_service': None, '_cluster_spec': ClusterSpec({}), '_task_type': 'worker', '_task_id': 0, '_global_id_in_cluster': 0, '_master': '', '_evaluation_master': '', '_is_chief': True, 
'_num_ps_replicas': 0, '_num_worker_replicas': 1}\n" ], [ "EPOCHS = 1000\nBATCH_SIZE = 8\ntotal_steps = EPOCHS * int(np.ceil(len(df_train) / BATCH_SIZE))\nprint('훈련 스텝:', total_steps)\n\nregressor.train(\n input_fn=lambda:train_input_fn(df_train_norm, batch_size=BATCH_SIZE),\n steps=total_steps)", "훈련 스텝: 40000\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/training/training_util.py:236: Variable.initialized_value (from tensorflow.python.ops.variables) is deprecated and will be removed in a future version.\nInstructions for updating:\nUse Variable.read_value. Variables in 2.X are initialized automatically both in eager and graph (inside tf.defun) contexts.\nINFO:tensorflow:Calling model_fn.\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/optimizer_v2/adagrad.py:83: calling Constant.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\nInstructions for updating:\nCall initializer instance with the dtype argument instead of passing it to the constructor\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Create CheckpointSaverHook.\nINFO:tensorflow:Graph was finalized.\nINFO:tensorflow:Running local_init_op.\nINFO:tensorflow:Done running local_init_op.\nINFO:tensorflow:Calling checkpoint listeners before saving checkpoint 0...\nINFO:tensorflow:Saving checkpoints for 0 into models/autompg-dnnregressor/model.ckpt.\nINFO:tensorflow:Calling checkpoint listeners after saving checkpoint 0...\nINFO:tensorflow:loss = 499.81906, step = 0\nINFO:tensorflow:global_step/sec: 419.833\nINFO:tensorflow:loss = 728.64026, step = 100 (0.239 sec)\nINFO:tensorflow:global_step/sec: 516.74\nINFO:tensorflow:loss = 502.44, step = 200 (0.195 sec)\nWARNING:tensorflow:It seems that global step (tf.train.get_global_step) has not been increased. Current value (could be stable): 201 vs previous value: 201. 
You could increase the global step by passing tf.train.get_global_step() to Optimizer.apply_gradients or Optimizer.minimize.\nINFO:tensorflow:global_step/sec: 500.97\nINFO:tensorflow:loss = 569.75073, step = 300 (0.201 sec)\nINFO:tensorflow:global_step/sec: 504.106\nINFO:tensorflow:loss = 789.51416, step = 400 (0.198 sec)\nINFO:tensorflow:global_step/sec: 414.888\nINFO:tensorflow:loss = 401.0173, step = 500 (0.240 sec)\nINFO:tensorflow:global_step/sec: 502.235\nINFO:tensorflow:loss = 869.59467, step = 600 (0.198 sec)\nINFO:tensorflow:global_step/sec: 508.569\nINFO:tensorflow:loss = 460.11295, step = 700 (0.197 sec)\nINFO:tensorflow:global_step/sec: 513.669\nINFO:tensorflow:loss = 466.1911, step = 800 (0.196 sec)\nINFO:tensorflow:global_step/sec: 504.5\nINFO:tensorflow:loss = 412.2461, step = 900 (0.197 sec)\nINFO:tensorflow:global_step/sec: 497.02\nINFO:tensorflow:loss = 703.21155, step = 1000 (0.200 sec)\nINFO:tensorflow:global_step/sec: 520.567\nINFO:tensorflow:loss = 740.5027, step = 1100 (0.195 sec)\nINFO:tensorflow:global_step/sec: 518.766\nINFO:tensorflow:loss = 488.0118, step = 1200 (0.190 sec)\nINFO:tensorflow:global_step/sec: 517.295\nINFO:tensorflow:loss = 676.7937, step = 1300 (0.196 sec)\nINFO:tensorflow:global_step/sec: 457.364\nINFO:tensorflow:loss = 630.02423, step = 1400 (0.219 sec)\nINFO:tensorflow:global_step/sec: 422.635\nINFO:tensorflow:loss = 425.80823, step = 1500 (0.233 sec)\nINFO:tensorflow:global_step/sec: 413.241\nINFO:tensorflow:loss = 544.6715, step = 1600 (0.242 sec)\nINFO:tensorflow:global_step/sec: 496.798\nINFO:tensorflow:loss = 444.89728, step = 1700 (0.204 sec)\nINFO:tensorflow:global_step/sec: 511.606\nINFO:tensorflow:loss = 362.77234, step = 1800 (0.194 sec)\nINFO:tensorflow:global_step/sec: 510.578\nINFO:tensorflow:loss = 433.21918, step = 1900 (0.196 sec)\nINFO:tensorflow:global_step/sec: 510.345\nINFO:tensorflow:loss = 422.28336, step = 2000 (0.197 sec)\nINFO:tensorflow:global_step/sec: 509.676\nINFO:tensorflow:loss = 386.2972, step = 2100 (0.195 sec)\nINFO:tensorflow:global_step/sec: 520.059\nINFO:tensorflow:loss = 408.16916, step = 2200 (0.192 sec)\nINFO:tensorflow:global_step/sec: 516.655\nINFO:tensorflow:loss = 530.0901, step = 2300 (0.196 sec)\nINFO:tensorflow:global_step/sec: 494.212\nINFO:tensorflow:loss = 418.70773, step = 2400 (0.198 sec)\nINFO:tensorflow:global_step/sec: 514.72\nINFO:tensorflow:loss = 393.56134, step = 2500 (0.197 sec)\nINFO:tensorflow:global_step/sec: 514.336\nINFO:tensorflow:loss = 403.1589, step = 2600 (0.194 sec)\nINFO:tensorflow:global_step/sec: 518.325\nINFO:tensorflow:loss = 549.42926, step = 2700 (0.193 sec)\nINFO:tensorflow:global_step/sec: 505.443\nINFO:tensorflow:loss = 377.96475, step = 2800 (0.198 sec)\nINFO:tensorflow:global_step/sec: 503.895\nINFO:tensorflow:loss = 407.91156, step = 2900 (0.197 sec)\nINFO:tensorflow:global_step/sec: 496.873\nINFO:tensorflow:loss = 196.11053, step = 3000 (0.206 sec)\nINFO:tensorflow:global_step/sec: 500.207\nINFO:tensorflow:loss = 205.37964, step = 3100 (0.194 sec)\nWARNING:tensorflow:It seems that global step (tf.train.get_global_step) has not been increased. Current value (could be stable): 3178 vs previous value: 3178. 
You could increase the global step by passing tf.train.get_global_step() to Optimizer.apply_gradients or Optimizer.minimize.\nINFO:tensorflow:global_step/sec: 489.039\nINFO:tensorflow:loss = 362.1402, step = 3200 (0.206 sec)\nINFO:tensorflow:global_step/sec: 509.195\nINFO:tensorflow:loss = 338.09937, step = 3300 (0.197 sec)\nINFO:tensorflow:global_step/sec: 497.024\nINFO:tensorflow:loss = 334.89508, step = 3400 (0.202 sec)\nINFO:tensorflow:global_step/sec: 417.552\nINFO:tensorflow:loss = 426.8413, step = 3500 (0.236 sec)\nINFO:tensorflow:global_step/sec: 441.509\nINFO:tensorflow:loss = 424.1369, step = 3600 (0.230 sec)\nINFO:tensorflow:global_step/sec: 497.343\nINFO:tensorflow:loss = 251.33066, step = 3700 (0.198 sec)\nINFO:tensorflow:global_step/sec: 513.948\nINFO:tensorflow:loss = 387.80347, step = 3800 (0.197 sec)\nINFO:tensorflow:global_step/sec: 506.219\nINFO:tensorflow:loss = 234.63878, step = 3900 (0.197 sec)\nINFO:tensorflow:global_step/sec: 493.946\nINFO:tensorflow:loss = 191.92566, step = 4000 (0.201 sec)\nINFO:tensorflow:global_step/sec: 521.304\nINFO:tensorflow:loss = 234.72676, step = 4100 (0.194 sec)\nINFO:tensorflow:global_step/sec: 497.557\nINFO:tensorflow:loss = 343.8628, step = 4200 (0.203 sec)\nINFO:tensorflow:global_step/sec: 507.644\nINFO:tensorflow:loss = 312.6717, step = 4300 (0.195 sec)\nINFO:tensorflow:global_step/sec: 525.339\nINFO:tensorflow:loss = 177.18924, step = 4400 (0.193 sec)\nWARNING:tensorflow:It seems that global step (tf.train.get_global_step) has not been increased. Current value (could be stable): 4481 vs previous value: 4481. You could increase the global step by passing tf.train.get_global_step() to Optimizer.apply_gradients or Optimizer.minimize.\nINFO:tensorflow:global_step/sec: 484.307\nINFO:tensorflow:loss = 244.99046, step = 4500 (0.206 sec)\nINFO:tensorflow:global_step/sec: 519.462\nINFO:tensorflow:loss = 360.98297, step = 4600 (0.191 sec)\nINFO:tensorflow:global_step/sec: 525.67\nINFO:tensorflow:loss = 264.12756, step = 4700 (0.190 sec)\nINFO:tensorflow:global_step/sec: 497.202\nINFO:tensorflow:loss = 375.1087, step = 4800 (0.199 sec)\nINFO:tensorflow:global_step/sec: 525.763\nINFO:tensorflow:loss = 312.72815, step = 4900 (0.193 sec)\nWARNING:tensorflow:It seems that global step (tf.train.get_global_step) has not been increased. Current value (could be stable): 4901 vs previous value: 4901. 
You could increase the global step by passing tf.train.get_global_step() to Optimizer.apply_gradients or Optimizer.minimize.\nINFO:tensorflow:global_step/sec: 483.175\nINFO:tensorflow:loss = 425.18405, step = 5000 (0.208 sec)\nINFO:tensorflow:global_step/sec: 520.09\nINFO:tensorflow:loss = 331.28525, step = 5100 (0.192 sec)\nINFO:tensorflow:global_step/sec: 523.058\nINFO:tensorflow:loss = 289.4488, step = 5200 (0.191 sec)\nINFO:tensorflow:global_step/sec: 524.361\nINFO:tensorflow:loss = 105.40331, step = 5300 (0.191 sec)\nINFO:tensorflow:global_step/sec: 497.483\nINFO:tensorflow:loss = 381.04987, step = 5400 (0.201 sec)\nINFO:tensorflow:global_step/sec: 430.887\nINFO:tensorflow:loss = 167.82481, step = 5500 (0.232 sec)\nINFO:tensorflow:global_step/sec: 417.231\nINFO:tensorflow:loss = 270.53317, step = 5600 (0.237 sec)\nINFO:tensorflow:global_step/sec: 495.971\nINFO:tensorflow:loss = 185.95865, step = 5700 (0.205 sec)\nINFO:tensorflow:global_step/sec: 517.616\nINFO:tensorflow:loss = 174.55894, step = 5800 (0.191 sec)\nINFO:tensorflow:global_step/sec: 507.642\nINFO:tensorflow:loss = 198.75955, step = 5900 (0.196 sec)\nWARNING:tensorflow:It seems that global step (tf.train.get_global_step) has not been increased. Current value (could be stable): 5903 vs previous value: 5903. You could increase the global step by passing tf.train.get_global_step() to Optimizer.apply_gradients or Optimizer.minimize.\nINFO:tensorflow:global_step/sec: 467.793\nINFO:tensorflow:loss = 147.48232, step = 6000 (0.215 sec)\nINFO:tensorflow:global_step/sec: 517.707\nINFO:tensorflow:loss = 125.25069, step = 6100 (0.193 sec)\nINFO:tensorflow:global_step/sec: 512.664\nINFO:tensorflow:loss = 106.06342, step = 6200 (0.196 sec)\nINFO:tensorflow:global_step/sec: 514.783\nINFO:tensorflow:loss = 311.43573, step = 6300 (0.194 sec)\nINFO:tensorflow:global_step/sec: 491.523\nINFO:tensorflow:loss = 112.763245, step = 6400 (0.201 sec)\nINFO:tensorflow:global_step/sec: 467.031\nINFO:tensorflow:loss = 328.1735, step = 6500 (0.217 sec)\nINFO:tensorflow:global_step/sec: 420.532\nINFO:tensorflow:loss = 313.0763, step = 6600 (0.237 sec)\nINFO:tensorflow:global_step/sec: 525.221\nINFO:tensorflow:loss = 272.4488, step = 6700 (0.189 sec)\nINFO:tensorflow:global_step/sec: 522.413\nINFO:tensorflow:loss = 133.08063, step = 6800 (0.193 sec)\nINFO:tensorflow:global_step/sec: 495.609\nINFO:tensorflow:loss = 205.16248, step = 6900 (0.200 sec)\nINFO:tensorflow:global_step/sec: 509.382\nINFO:tensorflow:loss = 137.91165, step = 7000 (0.198 sec)\nINFO:tensorflow:global_step/sec: 510.626\nINFO:tensorflow:loss = 239.80998, step = 7100 (0.196 sec)\nINFO:tensorflow:global_step/sec: 514.014\nINFO:tensorflow:loss = 137.1398, step = 7200 (0.193 sec)\nINFO:tensorflow:global_step/sec: 520.586\nINFO:tensorflow:loss = 256.19214, step = 7300 (0.194 sec)\nINFO:tensorflow:global_step/sec: 421.086\nINFO:tensorflow:loss = 155.69348, step = 7400 (0.236 sec)\nINFO:tensorflow:global_step/sec: 500.677\nINFO:tensorflow:loss = 75.53708, step = 7500 (0.198 sec)\nINFO:tensorflow:global_step/sec: 514.989\nINFO:tensorflow:loss = 73.96437, step = 7600 (0.197 sec)\nINFO:tensorflow:global_step/sec: 525.608\nINFO:tensorflow:loss = 159.74828, step = 7700 (0.188 sec)\nINFO:tensorflow:global_step/sec: 526.45\nINFO:tensorflow:loss = 292.30182, step = 7800 (0.192 sec)\nINFO:tensorflow:global_step/sec: 486.385\nINFO:tensorflow:loss = 153.95279, step = 7900 (0.207 sec)\nINFO:tensorflow:global_step/sec: 481.82\nINFO:tensorflow:loss = 177.09927, step = 8000 (0.207 
sec)\nINFO:tensorflow:global_step/sec: 505.639\nINFO:tensorflow:loss = 125.36424, step = 8100 (0.197 sec)\nINFO:tensorflow:global_step/sec: 517.065\nINFO:tensorflow:loss = 205.31622, step = 8200 (0.192 sec)\nINFO:tensorflow:global_step/sec: 504.22\nINFO:tensorflow:loss = 80.79169, step = 8300 (0.200 sec)\nINFO:tensorflow:global_step/sec: 518.965\nINFO:tensorflow:loss = 119.37741, step = 8400 (0.191 sec)\nINFO:tensorflow:global_step/sec: 489.05\nINFO:tensorflow:loss = 108.908966, step = 8500 (0.206 sec)\nINFO:tensorflow:global_step/sec: 507.669\nINFO:tensorflow:loss = 118.81102, step = 8600 (0.196 sec)\nINFO:tensorflow:global_step/sec: 520.429\nINFO:tensorflow:loss = 138.83594, step = 8700 (0.193 sec)\nINFO:tensorflow:global_step/sec: 502.389\nINFO:tensorflow:loss = 162.79666, step = 8800 (0.198 sec)\nINFO:tensorflow:global_step/sec: 523.123\nINFO:tensorflow:loss = 106.664215, step = 8900 (0.189 sec)\nINFO:tensorflow:global_step/sec: 487.641\nINFO:tensorflow:loss = 100.30021, step = 9000 (0.205 sec)\nINFO:tensorflow:global_step/sec: 521.931\nINFO:tensorflow:loss = 174.81514, step = 9100 (0.193 sec)\nINFO:tensorflow:global_step/sec: 516.452\nINFO:tensorflow:loss = 139.92159, step = 9200 (0.193 sec)\nINFO:tensorflow:global_step/sec: 500.696\nINFO:tensorflow:loss = 121.40532, step = 9300 (0.200 sec)\nINFO:tensorflow:global_step/sec: 498.287\nINFO:tensorflow:loss = 133.38837, step = 9400 (0.203 sec)\nINFO:tensorflow:global_step/sec: 482.121\nINFO:tensorflow:loss = 68.1252, step = 9500 (0.205 sec)\nINFO:tensorflow:global_step/sec: 471.003\nINFO:tensorflow:loss = 56.009064, step = 9600 (0.211 sec)\nINFO:tensorflow:global_step/sec: 515.948\nINFO:tensorflow:loss = 42.99523, step = 9700 (0.198 sec)\nINFO:tensorflow:global_step/sec: 501.084\nINFO:tensorflow:loss = 79.84474, step = 9800 (0.195 sec)\nINFO:tensorflow:global_step/sec: 523.155\nINFO:tensorflow:loss = 119.99466, step = 9900 (0.194 sec)\nINFO:tensorflow:global_step/sec: 489.018\nINFO:tensorflow:loss = 172.84372, step = 10000 (0.204 sec)\nINFO:tensorflow:global_step/sec: 468.009\nINFO:tensorflow:loss = 119.95914, step = 10100 (0.211 sec)\nINFO:tensorflow:global_step/sec: 519.887\nINFO:tensorflow:loss = 73.78006, step = 10200 (0.195 sec)\nINFO:tensorflow:global_step/sec: 514.228\nINFO:tensorflow:loss = 91.49827, step = 10300 (0.191 sec)\nINFO:tensorflow:global_step/sec: 459.054\nINFO:tensorflow:loss = 131.61583, step = 10400 (0.220 sec)\nINFO:tensorflow:global_step/sec: 469.455\nINFO:tensorflow:loss = 44.48391, step = 10500 (0.214 sec)\nINFO:tensorflow:global_step/sec: 504.339\nINFO:tensorflow:loss = 134.62445, step = 10600 (0.197 sec)\nINFO:tensorflow:global_step/sec: 512.982\nINFO:tensorflow:loss = 37.590042, step = 10700 (0.195 sec)\nINFO:tensorflow:global_step/sec: 490.179\nINFO:tensorflow:loss = 70.58362, step = 10800 (0.206 sec)\nINFO:tensorflow:global_step/sec: 517.093\nINFO:tensorflow:loss = 75.58607, step = 10900 (0.193 sec)\nINFO:tensorflow:global_step/sec: 444.86\nINFO:tensorflow:loss = 40.550606, step = 11000 (0.224 sec)\nINFO:tensorflow:global_step/sec: 438.798\nINFO:tensorflow:loss = 65.27162, step = 11100 (0.228 sec)\nINFO:tensorflow:global_step/sec: 515.489\nINFO:tensorflow:loss = 112.3871, step = 11200 (0.194 sec)\nINFO:tensorflow:global_step/sec: 520.289\nINFO:tensorflow:loss = 42.775322, step = 11300 (0.192 sec)\nINFO:tensorflow:global_step/sec: 495.629\nINFO:tensorflow:loss = 42.64663, step = 11400 (0.201 sec)\nINFO:tensorflow:global_step/sec: 499.638\nINFO:tensorflow:loss = 91.327484, step = 11500 (0.202 
sec)\nINFO:tensorflow:global_step/sec: 504.569\nINFO:tensorflow:loss = 20.98386, step = 11600 (0.198 sec)\nINFO:tensorflow:global_step/sec: 516.401\nINFO:tensorflow:loss = 31.51747, step = 11700 (0.192 sec)\nINFO:tensorflow:global_step/sec: 520.591\nINFO:tensorflow:loss = 103.9842, step = 11800 (0.194 sec)\nINFO:tensorflow:global_step/sec: 506.536\nINFO:tensorflow:loss = 26.423393, step = 11900 (0.197 sec)\nINFO:tensorflow:global_step/sec: 495.607\nINFO:tensorflow:loss = 51.36325, step = 12000 (0.201 sec)\nINFO:tensorflow:global_step/sec: 515.926\nINFO:tensorflow:loss = 56.5116, step = 12100 (0.194 sec)\nINFO:tensorflow:global_step/sec: 482.278\nINFO:tensorflow:loss = 90.91087, step = 12200 (0.208 sec)\nINFO:tensorflow:global_step/sec: 517.533\nINFO:tensorflow:loss = 36.43811, step = 12300 (0.192 sec)\nINFO:tensorflow:global_step/sec: 488.773\nINFO:tensorflow:loss = 38.33581, step = 12400 (0.207 sec)\nINFO:tensorflow:global_step/sec: 488.051\nINFO:tensorflow:loss = 111.93008, step = 12500 (0.201 sec)\nINFO:tensorflow:global_step/sec: 518.809\nINFO:tensorflow:loss = 30.991726, step = 12600 (0.194 sec)\nINFO:tensorflow:global_step/sec: 508.808\nINFO:tensorflow:loss = 57.6763, step = 12700 (0.194 sec)\nINFO:tensorflow:global_step/sec: 514.085\nINFO:tensorflow:loss = 31.296913, step = 12800 (0.196 sec)\nINFO:tensorflow:global_step/sec: 503.45\nINFO:tensorflow:loss = 10.003127, step = 12900 (0.200 sec)\nINFO:tensorflow:global_step/sec: 411.522\nINFO:tensorflow:loss = 15.536169, step = 13000 (0.244 sec)\nINFO:tensorflow:global_step/sec: 448.52\nINFO:tensorflow:loss = 37.387733, step = 13100 (0.222 sec)\nINFO:tensorflow:global_step/sec: 500.079\nINFO:tensorflow:loss = 33.15093, step = 13200 (0.202 sec)\nINFO:tensorflow:global_step/sec: 492.015\nINFO:tensorflow:loss = 42.638702, step = 13300 (0.202 sec)\nINFO:tensorflow:global_step/sec: 438.85\nINFO:tensorflow:loss = 33.25905, step = 13400 (0.227 sec)\nINFO:tensorflow:global_step/sec: 493.106\nINFO:tensorflow:loss = 26.396673, step = 13500 (0.201 sec)\nINFO:tensorflow:global_step/sec: 519.96\nINFO:tensorflow:loss = 10.59149, step = 13600 (0.195 sec)\nINFO:tensorflow:global_step/sec: 515.809\nINFO:tensorflow:loss = 25.44658, step = 13700 (0.194 sec)\nINFO:tensorflow:global_step/sec: 474.454\nINFO:tensorflow:loss = 31.416142, step = 13800 (0.208 sec)\nINFO:tensorflow:global_step/sec: 415.866\nINFO:tensorflow:loss = 34.952724, step = 13900 (0.243 sec)\nINFO:tensorflow:global_step/sec: 404.79\nINFO:tensorflow:loss = 38.247177, step = 14000 (0.245 sec)\nINFO:tensorflow:global_step/sec: 414.502\nINFO:tensorflow:loss = 31.504301, step = 14100 (0.245 sec)\nINFO:tensorflow:global_step/sec: 416.385\nINFO:tensorflow:loss = 42.041656, step = 14200 (0.237 sec)\nINFO:tensorflow:global_step/sec: 431.224\nINFO:tensorflow:loss = 23.24776, step = 14300 (0.234 sec)\nINFO:tensorflow:global_step/sec: 503.246\nINFO:tensorflow:loss = 63.975765, step = 14400 (0.197 sec)\nINFO:tensorflow:global_step/sec: 520.47\nINFO:tensorflow:loss = 33.07982, step = 14500 (0.192 sec)\nINFO:tensorflow:global_step/sec: 496.651\nINFO:tensorflow:loss = 43.42848, step = 14600 (0.203 sec)\nINFO:tensorflow:global_step/sec: 512.589\nINFO:tensorflow:loss = 11.826054, step = 14700 (0.194 sec)\nINFO:tensorflow:global_step/sec: 505.578\nINFO:tensorflow:loss = 39.25447, step = 14800 (0.196 sec)\nINFO:tensorflow:global_step/sec: 513.171\nINFO:tensorflow:loss = 11.81307, step = 14900 (0.197 sec)\nINFO:tensorflow:global_step/sec: 524.087\nINFO:tensorflow:loss = 15.21232, step = 15000 (0.193 
sec)\nINFO:tensorflow:global_step/sec: 497.951\nINFO:tensorflow:loss = 30.670906, step = 15100 (0.199 sec)\nINFO:tensorflow:global_step/sec: 521.673\nINFO:tensorflow:loss = 72.13617, step = 15200 (0.192 sec)\nINFO:tensorflow:global_step/sec: 428.581\nINFO:tensorflow:loss = 28.455925, step = 15300 (0.233 sec)\nINFO:tensorflow:global_step/sec: 502.692\nINFO:tensorflow:loss = 35.726208, step = 15400 (0.199 sec)\nINFO:tensorflow:global_step/sec: 506.394\nINFO:tensorflow:loss = 32.05252, step = 15500 (0.198 sec)\nINFO:tensorflow:global_step/sec: 520.268\nINFO:tensorflow:loss = 26.44619, step = 15600 (0.192 sec)\nINFO:tensorflow:global_step/sec: 512.007\nINFO:tensorflow:loss = 10.501704, step = 15700 (0.196 sec)\nINFO:tensorflow:global_step/sec: 497.531\nINFO:tensorflow:loss = 44.373955, step = 15800 (0.200 sec)\nINFO:tensorflow:global_step/sec: 499.509\nINFO:tensorflow:loss = 44.226562, step = 15900 (0.200 sec)\nINFO:tensorflow:global_step/sec: 467.418\nINFO:tensorflow:loss = 12.512472, step = 16000 (0.215 sec)\nINFO:tensorflow:global_step/sec: 429.986\nINFO:tensorflow:loss = 12.539215, step = 16100 (0.230 sec)\nINFO:tensorflow:global_step/sec: 395.252\nINFO:tensorflow:loss = 33.00235, step = 16200 (0.256 sec)\nINFO:tensorflow:global_step/sec: 405.094\nINFO:tensorflow:loss = 31.814281, step = 16300 (0.246 sec)\nINFO:tensorflow:global_step/sec: 441.068\nINFO:tensorflow:loss = 37.852043, step = 16400 (0.225 sec)\nINFO:tensorflow:global_step/sec: 526.419\nINFO:tensorflow:loss = 24.984346, step = 16500 (0.192 sec)\nINFO:tensorflow:global_step/sec: 511.548\nINFO:tensorflow:loss = 6.2948427, step = 16600 (0.195 sec)\nINFO:tensorflow:global_step/sec: 520.032\nINFO:tensorflow:loss = 27.390638, step = 16700 (0.190 sec)\nINFO:tensorflow:global_step/sec: 490.841\nINFO:tensorflow:loss = 12.3871565, step = 16800 (0.206 sec)\nINFO:tensorflow:global_step/sec: 522.778\nINFO:tensorflow:loss = 29.851658, step = 16900 (0.190 sec)\nINFO:tensorflow:global_step/sec: 525.006\nINFO:tensorflow:loss = 113.616684, step = 17000 (0.192 sec)\nINFO:tensorflow:global_step/sec: 520.623\nINFO:tensorflow:loss = 14.389073, step = 17100 (0.190 sec)\nINFO:tensorflow:global_step/sec: 510.206\nINFO:tensorflow:loss = 13.052751, step = 17200 (0.198 sec)\nINFO:tensorflow:global_step/sec: 506.414\nINFO:tensorflow:loss = 22.37416, step = 17300 (0.195 sec)\nINFO:tensorflow:global_step/sec: 489.691\nINFO:tensorflow:loss = 26.191563, step = 17400 (0.206 sec)\nINFO:tensorflow:global_step/sec: 527.639\nINFO:tensorflow:loss = 38.271713, step = 17500 (0.191 sec)\nINFO:tensorflow:global_step/sec: 523.462\nINFO:tensorflow:loss = 68.042175, step = 17600 (0.191 sec)\nINFO:tensorflow:global_step/sec: 491.265\nINFO:tensorflow:loss = 3.9661694, step = 17700 (0.203 sec)\nINFO:tensorflow:global_step/sec: 506.066\nINFO:tensorflow:loss = 7.7871118, step = 17800 (0.199 sec)\nINFO:tensorflow:global_step/sec: 487.267\nINFO:tensorflow:loss = 4.147113, step = 17900 (0.203 sec)\nINFO:tensorflow:global_step/sec: 519.16\nINFO:tensorflow:loss = 11.697222, step = 18000 (0.195 sec)\nINFO:tensorflow:global_step/sec: 500.73\nINFO:tensorflow:loss = 26.677807, step = 18100 (0.197 sec)\nINFO:tensorflow:global_step/sec: 458.729\nINFO:tensorflow:loss = 45.53578, step = 18200 (0.221 sec)\nINFO:tensorflow:global_step/sec: 486.923\nINFO:tensorflow:loss = 20.893196, step = 18300 (0.205 sec)\nINFO:tensorflow:global_step/sec: 503.67\nINFO:tensorflow:loss = 10.31268, step = 18400 (0.199 sec)\nINFO:tensorflow:global_step/sec: 509.671\nINFO:tensorflow:loss = 25.87992, step = 18500 
(0.196 sec)\nINFO:tensorflow:global_step/sec: 490.75\nINFO:tensorflow:loss = 23.207005, step = 18600 (0.202 sec)\nINFO:tensorflow:global_step/sec: 517.68\nINFO:tensorflow:loss = 12.175332, step = 18700 (0.192 sec)\nINFO:tensorflow:global_step/sec: 498.063\nINFO:tensorflow:loss = 13.832592, step = 18800 (0.201 sec)\nINFO:tensorflow:global_step/sec: 502.15\nINFO:tensorflow:loss = 52.894524, step = 18900 (0.201 sec)\nINFO:tensorflow:global_step/sec: 502.819\nINFO:tensorflow:loss = 34.65064, step = 19000 (0.198 sec)\nINFO:tensorflow:global_step/sec: 514.827\nINFO:tensorflow:loss = 18.531244, step = 19100 (0.195 sec)\nINFO:tensorflow:global_step/sec: 487.574\nINFO:tensorflow:loss = 17.336088, step = 19200 (0.207 sec)\nINFO:tensorflow:global_step/sec: 506.947\nINFO:tensorflow:loss = 13.483857, step = 19300 (0.195 sec)\nINFO:tensorflow:global_step/sec: 494.998\nINFO:tensorflow:loss = 6.236288, step = 19400 (0.203 sec)\nINFO:tensorflow:global_step/sec: 516.745\nINFO:tensorflow:loss = 28.92854, step = 19500 (0.192 sec)\nINFO:tensorflow:global_step/sec: 521.304\nINFO:tensorflow:loss = 23.383617, step = 19600 (0.192 sec)\nINFO:tensorflow:global_step/sec: 496.313\nINFO:tensorflow:loss = 48.519646, step = 19700 (0.200 sec)\nINFO:tensorflow:global_step/sec: 519.391\nINFO:tensorflow:loss = 42.327744, step = 19800 (0.194 sec)\nINFO:tensorflow:global_step/sec: 508.642\nINFO:tensorflow:loss = 63.754765, step = 19900 (0.198 sec)\nINFO:tensorflow:global_step/sec: 525.607\nINFO:tensorflow:loss = 45.647488, step = 20000 (0.188 sec)\nINFO:tensorflow:global_step/sec: 519.671\nINFO:tensorflow:loss = 16.893047, step = 20100 (0.192 sec)\nINFO:tensorflow:global_step/sec: 518.912\nINFO:tensorflow:loss = 9.075402, step = 20200 (0.195 sec)\nINFO:tensorflow:global_step/sec: 407.543\nINFO:tensorflow:loss = 23.233398, step = 20300 (0.243 sec)\nINFO:tensorflow:global_step/sec: 421.6\nINFO:tensorflow:loss = 3.418612, step = 20400 (0.239 sec)\nINFO:tensorflow:global_step/sec: 515.563\nINFO:tensorflow:loss = 14.298994, step = 20500 (0.195 sec)\nINFO:tensorflow:global_step/sec: 511.58\nINFO:tensorflow:loss = 41.55316, step = 20600 (0.194 sec)\nINFO:tensorflow:global_step/sec: 502.16\nINFO:tensorflow:loss = 29.86295, step = 20700 (0.198 sec)\nINFO:tensorflow:global_step/sec: 446.314\nINFO:tensorflow:loss = 10.937971, step = 20800 (0.224 sec)\nINFO:tensorflow:global_step/sec: 418.087\nINFO:tensorflow:loss = 40.25834, step = 20900 (0.242 sec)\nINFO:tensorflow:global_step/sec: 525.251\nINFO:tensorflow:loss = 14.415007, step = 21000 (0.191 sec)\nINFO:tensorflow:global_step/sec: 512.178\nINFO:tensorflow:loss = 4.5525026, step = 21100 (0.195 sec)\nINFO:tensorflow:global_step/sec: 381.541\nINFO:tensorflow:loss = 46.68278, step = 21200 (0.262 sec)\nINFO:tensorflow:global_step/sec: 401.812\nINFO:tensorflow:loss = 19.85836, step = 21300 (0.249 sec)\nINFO:tensorflow:global_step/sec: 513.051\nINFO:tensorflow:loss = 40.12404, step = 21400 (0.193 sec)\nINFO:tensorflow:global_step/sec: 527.046\nINFO:tensorflow:loss = 36.51232, step = 21500 (0.190 sec)\nINFO:tensorflow:global_step/sec: 506.052\nINFO:tensorflow:loss = 15.239742, step = 21600 (0.199 sec)\nINFO:tensorflow:global_step/sec: 516.679\nINFO:tensorflow:loss = 7.9274597, step = 21700 (0.193 sec)\nINFO:tensorflow:global_step/sec: 491.874\nINFO:tensorflow:loss = 29.186543, step = 21800 (0.206 sec)\nINFO:tensorflow:global_step/sec: 499.115\nINFO:tensorflow:loss = 8.849433, step = 21900 (0.197 sec)\nINFO:tensorflow:global_step/sec: 522.859\nINFO:tensorflow:loss = 21.118225, step = 22000 
(0.192 sec)\nINFO:tensorflow:global_step/sec: 488.238\nINFO:tensorflow:loss = 29.664843, step = 22100 (0.204 sec)\nINFO:tensorflow:global_step/sec: 508.373\nINFO:tensorflow:loss = 23.199055, step = 22200 (0.199 sec)\nINFO:tensorflow:global_step/sec: 494.13\nINFO:tensorflow:loss = 34.001926, step = 22300 (0.199 sec)\nINFO:tensorflow:global_step/sec: 514.482\nINFO:tensorflow:loss = 33.719437, step = 22400 (0.194 sec)\nINFO:tensorflow:global_step/sec: 519.947\nINFO:tensorflow:loss = 18.323015, step = 22500 (0.195 sec)\nINFO:tensorflow:global_step/sec: 503.487\nINFO:tensorflow:loss = 40.549862, step = 22600 (0.197 sec)\nINFO:tensorflow:global_step/sec: 510.756\nINFO:tensorflow:loss = 22.347485, step = 22700 (0.197 sec)\nINFO:tensorflow:global_step/sec: 508.787\nINFO:tensorflow:loss = 6.3549967, step = 22800 (0.193 sec)\nINFO:tensorflow:global_step/sec: 469.928\nINFO:tensorflow:loss = 10.308098, step = 22900 (0.215 sec)\nINFO:tensorflow:global_step/sec: 516.453\nINFO:tensorflow:loss = 29.08078, step = 23000 (0.194 sec)\nINFO:tensorflow:global_step/sec: 475.17\nINFO:tensorflow:loss = 7.8588934, step = 23100 (0.208 sec)\nINFO:tensorflow:global_step/sec: 429.887\nINFO:tensorflow:loss = 8.526459, step = 23200 (0.234 sec)\nINFO:tensorflow:global_step/sec: 434.359\nINFO:tensorflow:loss = 31.459545, step = 23300 (0.231 sec)\nINFO:tensorflow:global_step/sec: 451.06\nINFO:tensorflow:loss = 32.947006, step = 23400 (0.221 sec)\nINFO:tensorflow:global_step/sec: 459.92\nINFO:tensorflow:loss = 22.06173, step = 23500 (0.218 sec)\nINFO:tensorflow:global_step/sec: 449.098\nINFO:tensorflow:loss = 12.7022705, step = 23600 (0.221 sec)\nINFO:tensorflow:global_step/sec: 460.306\nINFO:tensorflow:loss = 20.33598, step = 23700 (0.216 sec)\nINFO:tensorflow:global_step/sec: 449.766\nINFO:tensorflow:loss = 16.218697, step = 23800 (0.225 sec)\nINFO:tensorflow:global_step/sec: 468.659\nINFO:tensorflow:loss = 16.113918, step = 23900 (0.211 sec)\nINFO:tensorflow:global_step/sec: 454.522\nINFO:tensorflow:loss = 15.874365, step = 24000 (0.221 sec)\nINFO:tensorflow:global_step/sec: 456.315\nINFO:tensorflow:loss = 8.342611, step = 24100 (0.220 sec)\nINFO:tensorflow:global_step/sec: 422.394\nINFO:tensorflow:loss = 23.059067, step = 24200 (0.238 sec)\nINFO:tensorflow:global_step/sec: 445.021\nINFO:tensorflow:loss = 16.13245, step = 24300 (0.224 sec)\nINFO:tensorflow:global_step/sec: 454.829\nINFO:tensorflow:loss = 17.657043, step = 24400 (0.222 sec)\nINFO:tensorflow:global_step/sec: 455.605\nINFO:tensorflow:loss = 5.388861, step = 24500 (0.218 sec)\nINFO:tensorflow:global_step/sec: 449.367\nINFO:tensorflow:loss = 8.143231, step = 24600 (0.223 sec)\nINFO:tensorflow:global_step/sec: 447.439\nINFO:tensorflow:loss = 43.81005, step = 24700 (0.224 sec)\nINFO:tensorflow:global_step/sec: 449.919\nINFO:tensorflow:loss = 48.555767, step = 24800 (0.223 sec)\nINFO:tensorflow:global_step/sec: 443.161\nINFO:tensorflow:loss = 10.46989, step = 24900 (0.223 sec)\nINFO:tensorflow:global_step/sec: 458.533\nINFO:tensorflow:loss = 15.549078, step = 25000 (0.219 sec)\nINFO:tensorflow:global_step/sec: 454.688\nINFO:tensorflow:loss = 11.198522, step = 25100 (0.221 sec)\nINFO:tensorflow:global_step/sec: 431.469\nINFO:tensorflow:loss = 6.6283317, step = 25200 (0.230 sec)\nINFO:tensorflow:global_step/sec: 362.286\nINFO:tensorflow:loss = 8.3116255, step = 25300 (0.278 sec)\nINFO:tensorflow:global_step/sec: 398.294\nINFO:tensorflow:loss = 12.494085, step = 25400 (0.248 sec)\nINFO:tensorflow:global_step/sec: 461.065\nINFO:tensorflow:loss = 18.995468, step = 
25500 (0.220 sec)\nINFO:tensorflow:global_step/sec: 393.973\nINFO:tensorflow:loss = 4.9694777, step = 25600 (0.252 sec)\nINFO:tensorflow:global_step/sec: 362.767\nINFO:tensorflow:loss = 48.70556, step = 25700 (0.275 sec)\nINFO:tensorflow:global_step/sec: 368.683\nINFO:tensorflow:loss = 39.061554, step = 25800 (0.273 sec)\nINFO:tensorflow:global_step/sec: 358.328\nINFO:tensorflow:loss = 6.10777, step = 25900 (0.279 sec)\nINFO:tensorflow:global_step/sec: 469.953\nINFO:tensorflow:loss = 8.0085335, step = 26000 (0.210 sec)\nINFO:tensorflow:global_step/sec: 452.612\nINFO:tensorflow:loss = 20.946404, step = 26100 (0.223 sec)\nINFO:tensorflow:global_step/sec: 423.389\nINFO:tensorflow:loss = 7.8029327, step = 26200 (0.237 sec)\nINFO:tensorflow:global_step/sec: 367.126\nINFO:tensorflow:loss = 4.6645155, step = 26300 (0.270 sec)\nINFO:tensorflow:global_step/sec: 424.94\nINFO:tensorflow:loss = 20.448275, step = 26400 (0.237 sec)\nINFO:tensorflow:global_step/sec: 451.92\nINFO:tensorflow:loss = 26.253376, step = 26500 (0.223 sec)\nINFO:tensorflow:global_step/sec: 456.83\nINFO:tensorflow:loss = 17.100275, step = 26600 (0.217 sec)\nINFO:tensorflow:global_step/sec: 459.726\nINFO:tensorflow:loss = 10.13943, step = 26700 (0.216 sec)\nINFO:tensorflow:global_step/sec: 459.585\nINFO:tensorflow:loss = 21.897095, step = 26800 (0.219 sec)\nINFO:tensorflow:global_step/sec: 459.073\nINFO:tensorflow:loss = 20.122267, step = 26900 (0.218 sec)\nINFO:tensorflow:global_step/sec: 441.092\nINFO:tensorflow:loss = 7.9530864, step = 27000 (0.229 sec)\nINFO:tensorflow:global_step/sec: 455.453\nINFO:tensorflow:loss = 8.72676, step = 27100 (0.220 sec)\nINFO:tensorflow:global_step/sec: 444.292\nINFO:tensorflow:loss = 5.87437, step = 27200 (0.223 sec)\nINFO:tensorflow:global_step/sec: 373.106\nINFO:tensorflow:loss = 20.261028, step = 27300 (0.270 sec)\nINFO:tensorflow:global_step/sec: 355.117\nINFO:tensorflow:loss = 16.343578, step = 27400 (0.282 sec)\nINFO:tensorflow:global_step/sec: 356.05\nINFO:tensorflow:loss = 25.072216, step = 27500 (0.278 sec)\nINFO:tensorflow:global_step/sec: 415.454\nINFO:tensorflow:loss = 20.354189, step = 27600 (0.243 sec)\nINFO:tensorflow:global_step/sec: 493.199\nINFO:tensorflow:loss = 9.349465, step = 27700 (0.203 sec)\nINFO:tensorflow:global_step/sec: 518.551\nINFO:tensorflow:loss = 5.633339, step = 27800 (0.195 sec)\nINFO:tensorflow:global_step/sec: 485.727\nINFO:tensorflow:loss = 3.9397788, step = 27900 (0.204 sec)\nINFO:tensorflow:global_step/sec: 525.667\nINFO:tensorflow:loss = 34.323402, step = 28000 (0.190 sec)\nINFO:tensorflow:global_step/sec: 517.244\nINFO:tensorflow:loss = 11.36763, step = 28100 (0.193 sec)\nINFO:tensorflow:global_step/sec: 505.192\nINFO:tensorflow:loss = 5.9173775, step = 28200 (0.198 sec)\nINFO:tensorflow:global_step/sec: 520.631\nINFO:tensorflow:loss = 19.196718, step = 28300 (0.192 sec)\nINFO:tensorflow:global_step/sec: 432.488\nINFO:tensorflow:loss = 9.608658, step = 28400 (0.230 sec)\nINFO:tensorflow:global_step/sec: 519.473\nINFO:tensorflow:loss = 43.098812, step = 28500 (0.196 sec)\nINFO:tensorflow:global_step/sec: 492.387\nINFO:tensorflow:loss = 2.7491546, step = 28600 (0.199 sec)\nINFO:tensorflow:global_step/sec: 480.372\nINFO:tensorflow:loss = 18.603237, step = 28700 (0.207 sec)\nINFO:tensorflow:global_step/sec: 515.891\nINFO:tensorflow:loss = 13.887796, step = 28800 (0.196 sec)\nINFO:tensorflow:global_step/sec: 472.748\nINFO:tensorflow:loss = 27.101536, step = 28900 (0.211 sec)\nINFO:tensorflow:global_step/sec: 518.261\nINFO:tensorflow:loss = 17.810965, step = 
29000 (0.194 sec)\nINFO:tensorflow:global_step/sec: 515.356\nINFO:tensorflow:loss = 6.344363, step = 29100 (0.193 sec)\nINFO:tensorflow:global_step/sec: 510.507\nINFO:tensorflow:loss = 9.34887, step = 29200 (0.197 sec)\nINFO:tensorflow:global_step/sec: 490.773\nINFO:tensorflow:loss = 16.565483, step = 29300 (0.203 sec)\nINFO:tensorflow:global_step/sec: 519.136\nINFO:tensorflow:loss = 9.5081625, step = 29400 (0.191 sec)\nINFO:tensorflow:global_step/sec: 512.306\nINFO:tensorflow:loss = 9.057052, step = 29500 (0.195 sec)\nINFO:tensorflow:global_step/sec: 526.805\nINFO:tensorflow:loss = 8.786637, step = 29600 (0.192 sec)\nINFO:tensorflow:global_step/sec: 509.595\nINFO:tensorflow:loss = 18.639, step = 29700 (0.194 sec)\nINFO:tensorflow:global_step/sec: 511.687\nINFO:tensorflow:loss = 32.004196, step = 29800 (0.197 sec)\nINFO:tensorflow:global_step/sec: 496.236\nINFO:tensorflow:loss = 27.971159, step = 29900 (0.201 sec)\nINFO:tensorflow:global_step/sec: 475.445\nINFO:tensorflow:loss = 22.11385, step = 30000 (0.210 sec)\nINFO:tensorflow:global_step/sec: 501.336\nINFO:tensorflow:loss = 6.959998, step = 30100 (0.201 sec)\nINFO:tensorflow:global_step/sec: 491.052\nINFO:tensorflow:loss = 16.430698, step = 30200 (0.203 sec)\nINFO:tensorflow:global_step/sec: 512.219\nINFO:tensorflow:loss = 12.368324, step = 30300 (0.194 sec)\nINFO:tensorflow:global_step/sec: 515.497\nINFO:tensorflow:loss = 10.416364, step = 30400 (0.195 sec)\nINFO:tensorflow:global_step/sec: 518.045\nINFO:tensorflow:loss = 5.951277, step = 30500 (0.192 sec)\nINFO:tensorflow:global_step/sec: 500.247\nINFO:tensorflow:loss = 4.673738, step = 30600 (0.200 sec)\nINFO:tensorflow:global_step/sec: 506.918\nINFO:tensorflow:loss = 11.984025, step = 30700 (0.199 sec)\nINFO:tensorflow:global_step/sec: 479.876\nINFO:tensorflow:loss = 10.686143, step = 30800 (0.208 sec)\nINFO:tensorflow:global_step/sec: 523.997\nINFO:tensorflow:loss = 10.539244, step = 30900 (0.189 sec)\nINFO:tensorflow:global_step/sec: 496.366\nINFO:tensorflow:loss = 20.505741, step = 31000 (0.204 sec)\nINFO:tensorflow:global_step/sec: 522.342\nINFO:tensorflow:loss = 16.979277, step = 31100 (0.193 sec)\nINFO:tensorflow:global_step/sec: 464.14\nINFO:tensorflow:loss = 5.7998734, step = 31200 (0.213 sec)\nINFO:tensorflow:global_step/sec: 514.332\nINFO:tensorflow:loss = 13.100612, step = 31300 (0.195 sec)\nINFO:tensorflow:global_step/sec: 523.379\nINFO:tensorflow:loss = 5.54752, step = 31400 (0.192 sec)\nINFO:tensorflow:global_step/sec: 508.706\nINFO:tensorflow:loss = 11.001032, step = 31500 (0.195 sec)\nINFO:tensorflow:global_step/sec: 523.109\nINFO:tensorflow:loss = 6.4239764, step = 31600 (0.191 sec)\nINFO:tensorflow:global_step/sec: 499.473\nINFO:tensorflow:loss = 3.496529, step = 31700 (0.200 sec)\nINFO:tensorflow:global_step/sec: 502.158\nINFO:tensorflow:loss = 18.764368, step = 31800 (0.201 sec)\nINFO:tensorflow:global_step/sec: 517.452\nINFO:tensorflow:loss = 11.178088, step = 31900 (0.193 sec)\nINFO:tensorflow:global_step/sec: 515.668\nINFO:tensorflow:loss = 20.153545, step = 32000 (0.193 sec)\nINFO:tensorflow:global_step/sec: 526.306\nINFO:tensorflow:loss = 12.307385, step = 32100 (0.190 sec)\nINFO:tensorflow:global_step/sec: 486.787\nINFO:tensorflow:loss = 12.578199, step = 32200 (0.204 sec)\nINFO:tensorflow:global_step/sec: 513.781\nINFO:tensorflow:loss = 3.8448281, step = 32300 (0.194 sec)\nINFO:tensorflow:global_step/sec: 513.106\nINFO:tensorflow:loss = 12.068693, step = 32400 (0.194 sec)\nINFO:tensorflow:global_step/sec: 514.275\nINFO:tensorflow:loss = 6.5997915, step = 
32500 (0.196 sec)\nINFO:tensorflow:global_step/sec: 495.174\nINFO:tensorflow:loss = 13.961435, step = 32600 (0.203 sec)\nINFO:tensorflow:global_step/sec: 518.142\nINFO:tensorflow:loss = 16.125095, step = 32700 (0.193 sec)\nINFO:tensorflow:global_step/sec: 477.763\nINFO:tensorflow:loss = 2.0854216, step = 32800 (0.210 sec)\nINFO:tensorflow:global_step/sec: 511.127\nINFO:tensorflow:loss = 12.997886, step = 32900 (0.194 sec)\nINFO:tensorflow:global_step/sec: 431.59\nINFO:tensorflow:loss = 36.44945, step = 33000 (0.232 sec)\nINFO:tensorflow:global_step/sec: 437.366\nINFO:tensorflow:loss = 12.546466, step = 33100 (0.230 sec)\nINFO:tensorflow:global_step/sec: 496.786\nINFO:tensorflow:loss = 19.634865, step = 33200 (0.202 sec)\nINFO:tensorflow:global_step/sec: 521.102\nINFO:tensorflow:loss = 1.9614507, step = 33300 (0.192 sec)\nINFO:tensorflow:global_step/sec: 433.463\nINFO:tensorflow:loss = 27.736622, step = 33400 (0.231 sec)\nINFO:tensorflow:global_step/sec: 492.516\nINFO:tensorflow:loss = 55.501156, step = 33500 (0.202 sec)\nINFO:tensorflow:global_step/sec: 517.448\nINFO:tensorflow:loss = 11.6983795, step = 33600 (0.194 sec)\nINFO:tensorflow:global_step/sec: 496.94\nINFO:tensorflow:loss = 11.572467, step = 33700 (0.203 sec)\nINFO:tensorflow:global_step/sec: 511.756\nINFO:tensorflow:loss = 16.215508, step = 33800 (0.192 sec)\nINFO:tensorflow:global_step/sec: 509.571\nINFO:tensorflow:loss = 17.425852, step = 33900 (0.196 sec)\nINFO:tensorflow:global_step/sec: 415.64\nINFO:tensorflow:loss = 75.26845, step = 34000 (0.240 sec)\nINFO:tensorflow:global_step/sec: 442.953\nINFO:tensorflow:loss = 6.284119, step = 34100 (0.228 sec)\nINFO:tensorflow:global_step/sec: 502.829\nINFO:tensorflow:loss = 21.842957, step = 34200 (0.199 sec)\nINFO:tensorflow:global_step/sec: 509.629\nINFO:tensorflow:loss = 11.651093, step = 34300 (0.195 sec)\nINFO:tensorflow:global_step/sec: 512.551\nINFO:tensorflow:loss = 13.308762, step = 34400 (0.196 sec)\nINFO:tensorflow:global_step/sec: 519.361\nINFO:tensorflow:loss = 44.151558, step = 34500 (0.190 sec)\nINFO:tensorflow:global_step/sec: 510.91\nINFO:tensorflow:loss = 6.8464866, step = 34600 (0.200 sec)\nINFO:tensorflow:global_step/sec: 485.814\nINFO:tensorflow:loss = 10.824612, step = 34700 (0.201 sec)\nINFO:tensorflow:global_step/sec: 521.409\nINFO:tensorflow:loss = 3.843721, step = 34800 (0.192 sec)\nINFO:tensorflow:global_step/sec: 486.434\nINFO:tensorflow:loss = 5.546958, step = 34900 (0.209 sec)\nINFO:tensorflow:global_step/sec: 411.628\nINFO:tensorflow:loss = 18.526724, step = 35000 (0.242 sec)\nINFO:tensorflow:global_step/sec: 489.118\nINFO:tensorflow:loss = 20.928247, step = 35100 (0.203 sec)\nINFO:tensorflow:global_step/sec: 395.192\nINFO:tensorflow:loss = 3.5254164, step = 35200 (0.253 sec)\nINFO:tensorflow:global_step/sec: 411.988\nINFO:tensorflow:loss = 3.8785763, step = 35300 (0.245 sec)\nINFO:tensorflow:global_step/sec: 414.26\nINFO:tensorflow:loss = 11.306296, step = 35400 (0.239 sec)\nINFO:tensorflow:global_step/sec: 458.516\nINFO:tensorflow:loss = 2.8811145, step = 35500 (0.219 sec)\nINFO:tensorflow:global_step/sec: 494.09\nINFO:tensorflow:loss = 15.360186, step = 35600 (0.203 sec)\nINFO:tensorflow:global_step/sec: 516.009\nINFO:tensorflow:loss = 11.368165, step = 35700 (0.194 sec)\nINFO:tensorflow:global_step/sec: 469.834\nINFO:tensorflow:loss = 26.508482, step = 35800 (0.210 sec)\nINFO:tensorflow:global_step/sec: 475.315\nINFO:tensorflow:loss = 13.314274, step = 35900 (0.213 sec)\nINFO:tensorflow:global_step/sec: 510.774\nINFO:tensorflow:loss = 2.1001377, 
step = 36000 (0.196 sec)\nINFO:tensorflow:global_step/sec: 500.183\nINFO:tensorflow:loss = 20.971676, step = 36100 (0.198 sec)\nINFO:tensorflow:global_step/sec: 510.361\nINFO:tensorflow:loss = 7.3410335, step = 36200 (0.196 sec)\nINFO:tensorflow:global_step/sec: 497.083\nINFO:tensorflow:loss = 5.997114, step = 36300 (0.202 sec)\nINFO:tensorflow:global_step/sec: 484.791\nINFO:tensorflow:loss = 11.839415, step = 36400 (0.205 sec)\nINFO:tensorflow:global_step/sec: 516.338\nINFO:tensorflow:loss = 11.825226, step = 36500 (0.192 sec)\nINFO:tensorflow:global_step/sec: 500.196\nINFO:tensorflow:loss = 18.386703, step = 36600 (0.202 sec)\nINFO:tensorflow:global_step/sec: 509.374\nINFO:tensorflow:loss = 8.380814, step = 36700 (0.196 sec)\nINFO:tensorflow:global_step/sec: 486.575\nINFO:tensorflow:loss = 11.68699, step = 36800 (0.205 sec)\nINFO:tensorflow:global_step/sec: 515.215\nINFO:tensorflow:loss = 3.6182966, step = 36900 (0.193 sec)\nINFO:tensorflow:global_step/sec: 514.575\nINFO:tensorflow:loss = 11.7091255, step = 37000 (0.197 sec)\nINFO:tensorflow:global_step/sec: 490.459\nINFO:tensorflow:loss = 7.473883, step = 37100 (0.202 sec)\nINFO:tensorflow:global_step/sec: 511.163\nINFO:tensorflow:loss = 13.126901, step = 37200 (0.196 sec)\nINFO:tensorflow:global_step/sec: 503.087\nINFO:tensorflow:loss = 27.171856, step = 37300 (0.201 sec)\nINFO:tensorflow:global_step/sec: 504.426\nINFO:tensorflow:loss = 14.645201, step = 37400 (0.197 sec)\nINFO:tensorflow:global_step/sec: 515.755\nINFO:tensorflow:loss = 12.13932, step = 37500 (0.195 sec)\nINFO:tensorflow:global_step/sec: 524.203\nINFO:tensorflow:loss = 15.745104, step = 37600 (0.189 sec)\nINFO:tensorflow:global_step/sec: 481.138\nINFO:tensorflow:loss = 4.440466, step = 37700 (0.209 sec)\nINFO:tensorflow:global_step/sec: 474.562\nINFO:tensorflow:loss = 4.598632, step = 37800 (0.213 sec)\nINFO:tensorflow:global_step/sec: 402.569\nINFO:tensorflow:loss = 8.279557, step = 37900 (0.245 sec)\nINFO:tensorflow:global_step/sec: 492.538\nINFO:tensorflow:loss = 9.994772, step = 38000 (0.208 sec)\nINFO:tensorflow:global_step/sec: 484.607\nINFO:tensorflow:loss = 17.756512, step = 38100 (0.202 sec)\nINFO:tensorflow:global_step/sec: 510.99\nINFO:tensorflow:loss = 12.600154, step = 38200 (0.195 sec)\nINFO:tensorflow:global_step/sec: 502.178\nINFO:tensorflow:loss = 28.867947, step = 38300 (0.202 sec)\nINFO:tensorflow:global_step/sec: 502.135\nINFO:tensorflow:loss = 17.5764, step = 38400 (0.195 sec)\nINFO:tensorflow:global_step/sec: 476.042\nINFO:tensorflow:loss = 19.424034, step = 38500 (0.213 sec)\nINFO:tensorflow:global_step/sec: 495.637\nINFO:tensorflow:loss = 28.154533, step = 38600 (0.202 sec)\nINFO:tensorflow:global_step/sec: 513.811\nINFO:tensorflow:loss = 21.845222, step = 38700 (0.192 sec)\nINFO:tensorflow:global_step/sec: 500.557\nINFO:tensorflow:loss = 3.6739442, step = 38800 (0.200 sec)\nINFO:tensorflow:global_step/sec: 508.338\nINFO:tensorflow:loss = 16.488531, step = 38900 (0.196 sec)\nINFO:tensorflow:global_step/sec: 518.026\nINFO:tensorflow:loss = 9.3945465, step = 39000 (0.194 sec)\nINFO:tensorflow:global_step/sec: 506.963\nINFO:tensorflow:loss = 6.8298917, step = 39100 (0.199 sec)\nINFO:tensorflow:global_step/sec: 509.377\nINFO:tensorflow:loss = 21.616089, step = 39200 (0.196 sec)\nINFO:tensorflow:global_step/sec: 508.596\nINFO:tensorflow:loss = 6.8472233, step = 39300 (0.195 sec)\nINFO:tensorflow:global_step/sec: 496.872\nINFO:tensorflow:loss = 14.046279, step = 39400 (0.204 sec)\nINFO:tensorflow:global_step/sec: 514.846\nINFO:tensorflow:loss = 
9.037189, step = 39500 (0.191 sec)\nINFO:tensorflow:global_step/sec: 509.705\nINFO:tensorflow:loss = 4.8524456, step = 39600 (0.202 sec)\nINFO:tensorflow:global_step/sec: 493.839\nINFO:tensorflow:loss = 12.701547, step = 39700 (0.199 sec)\nINFO:tensorflow:global_step/sec: 423.267\nINFO:tensorflow:loss = 29.621311, step = 39800 (0.236 sec)\nINFO:tensorflow:global_step/sec: 421.033\nINFO:tensorflow:loss = 4.307104, step = 39900 (0.237 sec)\nINFO:tensorflow:Calling checkpoint listeners before saving checkpoint 40000...\nINFO:tensorflow:Saving checkpoints for 40000 into models/autompg-dnnregressor/model.ckpt.\nINFO:tensorflow:Calling checkpoint listeners after saving checkpoint 40000...\nINFO:tensorflow:Loss for final step: 15.794298.\n" ], [ "reloaded_regressor = tf.estimator.DNNRegressor(\n feature_columns=all_feature_columns,\n hidden_units=[32, 10],\n warm_start_from='models/autompg-dnnregressor/',\n model_dir='models/autompg-dnnregressor/')", "INFO:tensorflow:Using default config.\nINFO:tensorflow:Using config: {'_model_dir': 'models/autompg-dnnregressor/', '_tf_random_seed': None, '_save_summary_steps': 100, '_save_checkpoints_steps': None, '_save_checkpoints_secs': 600, '_session_config': allow_soft_placement: true\ngraph_options {\n rewrite_options {\n meta_optimizer_iterations: ONE\n }\n}\n, '_keep_checkpoint_max': 5, '_keep_checkpoint_every_n_hours': 10000, '_log_step_count_steps': 100, '_train_distribute': None, '_device_fn': None, '_protocol': None, '_eval_distribute': None, '_experimental_distribute': None, '_experimental_max_worker_delay_secs': None, '_session_creation_timeout_secs': 7200, '_checkpoint_save_graph_def': True, '_service': None, '_cluster_spec': ClusterSpec({}), '_task_type': 'worker', '_task_id': 0, '_global_id_in_cluster': 0, '_master': '', '_evaluation_master': '', '_is_chief': True, '_num_ps_replicas': 0, '_num_worker_replicas': 1}\n" ], [ "def eval_input_fn(df_test, batch_size=8):\n df = df_test.copy()\n test_x, test_y = df, df.pop('MPG')\n dataset = tf.data.Dataset.from_tensor_slices((dict(test_x), test_y))\n\n return dataset.batch(batch_size)\n\neval_results = reloaded_regressor.evaluate(\n input_fn=lambda:eval_input_fn(df_test_norm, batch_size=8))\n\nfor key in eval_results:\n print('{:15s} {}'.format(key, eval_results[key]))\n \nprint('평균 손실 {:.4f}'.format(eval_results['average_loss']))", "INFO:tensorflow:Calling model_fn.\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Starting evaluation at 2021-01-02T15:55:48Z\nINFO:tensorflow:Graph was finalized.\nINFO:tensorflow:Restoring parameters from models/autompg-dnnregressor/model.ckpt-40000\nINFO:tensorflow:Running local_init_op.\nINFO:tensorflow:Done running local_init_op.\nINFO:tensorflow:Inference Time : 0.22684s\nINFO:tensorflow:Finished evaluation at 2021-01-02-15:55:48\nINFO:tensorflow:Saving dict for global step 40000: average_loss = 16.353544, global_step = 40000, label/mean = 23.611391, loss = 16.300344, prediction/mean = 21.912441\nINFO:tensorflow:Saving 'checkpoint_path' summary for global step 40000: models/autompg-dnnregressor/model.ckpt-40000\naverage_loss 16.353544235229492\nlabel/mean 23.611391067504883\nloss 16.300344467163086\nprediction/mean 21.91244125366211\nglobal_step 40000\n평균 손실 16.3535\n" ], [ "pred_res = regressor.predict(input_fn=lambda: eval_input_fn(df_test_norm, batch_size=8))\n\nprint(next(iter(pred_res)))", "INFO:tensorflow:Calling model_fn.\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Graph was finalized.\nINFO:tensorflow:Restoring parameters from 
models/autompg-dnnregressor/model.ckpt-40000\nINFO:tensorflow:Running local_init_op.\nINFO:tensorflow:Done running local_init_op.\n{'predictions': array([22.728632], dtype=float32)}\n" ] ], [ [ "#### Boosted Tree Regressor", "_____no_output_____" ] ], [ [ "boosted_tree = tf.estimator.BoostedTreesRegressor(\n feature_columns=all_feature_columns,\n n_batches_per_layer=20,\n n_trees=200)\n\nboosted_tree.train(\n input_fn=lambda:train_input_fn(df_train_norm, batch_size=BATCH_SIZE))\n\neval_results = boosted_tree.evaluate(\n input_fn=lambda:eval_input_fn(df_test_norm, batch_size=8))\n\nprint(eval_results)\n\nprint('평균 손실 {:.4f}'.format(eval_results['average_loss']))", "INFO:tensorflow:Using default config.\nWARNING:tensorflow:Using temporary folder as model directory: /tmp/tmpe71wdd8q\nINFO:tensorflow:Using config: {'_model_dir': '/tmp/tmpe71wdd8q', '_tf_random_seed': None, '_save_summary_steps': 100, '_save_checkpoints_steps': None, '_save_checkpoints_secs': 600, '_session_config': allow_soft_placement: true\ngraph_options {\n rewrite_options {\n meta_optimizer_iterations: ONE\n }\n}\n, '_keep_checkpoint_max': 5, '_keep_checkpoint_every_n_hours': 10000, '_log_step_count_steps': 100, '_train_distribute': None, '_device_fn': None, '_protocol': None, '_eval_distribute': None, '_experimental_distribute': None, '_experimental_max_worker_delay_secs': None, '_session_creation_timeout_secs': 7200, '_checkpoint_save_graph_def': True, '_service': None, '_cluster_spec': ClusterSpec({}), '_task_type': 'worker', '_task_id': 0, '_global_id_in_cluster': 0, '_master': '', '_evaluation_master': '', '_is_chief': True, '_num_ps_replicas': 0, '_num_worker_replicas': 1}\nWARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_estimator/python/estimator/canned/boosted_trees.py:398: VocabularyListCategoricalColumn._num_buckets (from tensorflow.python.feature_column.feature_column_v2) is deprecated and will be removed in a future version.\nInstructions for updating:\nThe old _FeatureColumn APIs are being deprecated. Please use the new FeatureColumn APIs instead.\nINFO:tensorflow:Calling model_fn.\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Create CheckpointSaverHook.\nWARNING:tensorflow:Issue encountered when serializing resources.\nType is unsupported, or the types of the items don't match field type in CollectionDef. Note this is a warning and probably safe to ignore.\n'_Resource' object has no attribute 'name'\nINFO:tensorflow:Graph was finalized.\nINFO:tensorflow:Running local_init_op.\nINFO:tensorflow:Done running local_init_op.\nWARNING:tensorflow:Issue encountered when serializing resources.\nType is unsupported, or the types of the items don't match field type in CollectionDef. Note this is a warning and probably safe to ignore.\n'_Resource' object has no attribute 'name'\nINFO:tensorflow:Calling checkpoint listeners before saving checkpoint 0...\nINFO:tensorflow:Saving checkpoints for 0 into /tmp/tmpe71wdd8q/model.ckpt.\nWARNING:tensorflow:Issue encountered when serializing resources.\nType is unsupported, or the types of the items don't match field type in CollectionDef. 
Note this is a warning and probably safe to ignore.\n'_Resource' object has no attribute 'name'\nINFO:tensorflow:Calling checkpoint listeners after saving checkpoint 0...\nINFO:tensorflow:loss = 779.1825, step = 0\nINFO:tensorflow:loss = 175.98672, step = 80 (0.708 sec)\nINFO:tensorflow:global_step/sec: 112.597\nINFO:tensorflow:loss = 88.06142, step = 180 (0.514 sec)\nINFO:tensorflow:global_step/sec: 236.325\nINFO:tensorflow:loss = 28.334957, step = 280 (0.439 sec)\nINFO:tensorflow:global_step/sec: 231.243\nINFO:tensorflow:loss = 7.330826, step = 380 (0.421 sec)\nINFO:tensorflow:global_step/sec: 236.812\nINFO:tensorflow:loss = 28.439013, step = 480 (0.511 sec)\nINFO:tensorflow:global_step/sec: 192.031\nINFO:tensorflow:loss = 2.9001746, step = 580 (0.428 sec)\nINFO:tensorflow:global_step/sec: 237.608\nINFO:tensorflow:loss = 5.0455194, step = 680 (0.429 sec)\nINFO:tensorflow:global_step/sec: 234.501\nINFO:tensorflow:loss = 3.5293148, step = 780 (0.427 sec)\nINFO:tensorflow:global_step/sec: 234.698\nINFO:tensorflow:loss = 3.3015428, step = 880 (0.431 sec)\nINFO:tensorflow:global_step/sec: 232.239\nINFO:tensorflow:loss = 1.2589538, step = 980 (0.478 sec)\nINFO:tensorflow:global_step/sec: 208.494\nINFO:tensorflow:loss = 3.0725284, step = 1080 (0.421 sec)\nINFO:tensorflow:global_step/sec: 239.407\nINFO:tensorflow:loss = 1.3558465, step = 1180 (0.422 sec)\nINFO:tensorflow:global_step/sec: 233.827\nINFO:tensorflow:loss = 3.4222438, step = 1280 (0.420 sec)\nINFO:tensorflow:global_step/sec: 240.196\nINFO:tensorflow:loss = 1.4523966, step = 1380 (0.420 sec)\nINFO:tensorflow:global_step/sec: 236.738\nINFO:tensorflow:loss = 0.7821708, step = 1480 (0.434 sec)\nINFO:tensorflow:global_step/sec: 231.39\nINFO:tensorflow:loss = 0.89154446, step = 1580 (0.432 sec)\nINFO:tensorflow:global_step/sec: 232.166\nINFO:tensorflow:loss = 2.9137201, step = 1680 (0.434 sec)\nINFO:tensorflow:global_step/sec: 230.789\nINFO:tensorflow:loss = 3.590133, step = 1780 (0.417 sec)\nINFO:tensorflow:global_step/sec: 237.99\nINFO:tensorflow:loss = 1.9238122, step = 1880 (0.431 sec)\nINFO:tensorflow:global_step/sec: 231.652\nINFO:tensorflow:loss = 1.3043885, step = 1980 (0.427 sec)\nINFO:tensorflow:global_step/sec: 233.25\nINFO:tensorflow:loss = 1.0231631, step = 2080 (0.511 sec)\nINFO:tensorflow:global_step/sec: 197.584\nINFO:tensorflow:loss = 0.7818819, step = 2180 (0.432 sec)\nINFO:tensorflow:global_step/sec: 228.415\nINFO:tensorflow:loss = 2.3887777, step = 2280 (0.427 sec)\nINFO:tensorflow:global_step/sec: 227.075\nINFO:tensorflow:loss = 0.40529764, step = 2380 (0.489 sec)\nINFO:tensorflow:global_step/sec: 210.38\nINFO:tensorflow:loss = 0.3062593, step = 2480 (0.420 sec)\nINFO:tensorflow:global_step/sec: 239.896\nINFO:tensorflow:loss = 0.5025814, step = 2580 (0.435 sec)\nINFO:tensorflow:global_step/sec: 230.695\nINFO:tensorflow:loss = 0.99545026, step = 2680 (0.436 sec)\nINFO:tensorflow:global_step/sec: 230.301\nINFO:tensorflow:loss = 1.8740138, step = 2780 (0.433 sec)\nINFO:tensorflow:global_step/sec: 228.792\nINFO:tensorflow:loss = 2.5301783, step = 2880 (0.433 sec)\nINFO:tensorflow:global_step/sec: 232.843\nINFO:tensorflow:loss = 1.8291496, step = 2980 (0.426 sec)\nINFO:tensorflow:global_step/sec: 228.888\nINFO:tensorflow:loss = 0.6858313, step = 3080 (0.437 sec)\nINFO:tensorflow:global_step/sec: 233.735\nINFO:tensorflow:loss = 0.53382206, step = 3180 (0.421 sec)\nINFO:tensorflow:global_step/sec: 235.949\nINFO:tensorflow:loss = 0.61666024, step = 3280 (0.472 sec)\nINFO:tensorflow:global_step/sec: 
212.431\nINFO:tensorflow:loss = 1.5132174, step = 3380 (0.428 sec)\nINFO:tensorflow:global_step/sec: 234.108\nINFO:tensorflow:loss = 0.26615345, step = 3480 (0.448 sec)\nINFO:tensorflow:global_step/sec: 213.462\nINFO:tensorflow:loss = 0.5673427, step = 3580 (0.493 sec)\nINFO:tensorflow:global_step/sec: 211.63\nINFO:tensorflow:loss = 0.6548833, step = 3680 (0.477 sec)\nINFO:tensorflow:global_step/sec: 199.918\nINFO:tensorflow:loss = 0.4136691, step = 3780 (0.499 sec)\nINFO:tensorflow:global_step/sec: 211.15\nINFO:tensorflow:loss = 0.44898975, step = 3880 (0.427 sec)\nINFO:tensorflow:global_step/sec: 226.425\nINFO:tensorflow:loss = 0.12739712, step = 3980 (0.513 sec)\nINFO:tensorflow:global_step/sec: 200.577\nINFO:tensorflow:loss = 0.27423677, step = 4080 (0.436 sec)\nINFO:tensorflow:global_step/sec: 226.914\nINFO:tensorflow:loss = 0.30576748, step = 4180 (0.439 sec)\nINFO:tensorflow:global_step/sec: 228.385\nINFO:tensorflow:loss = 0.15210456, step = 4280 (0.424 sec)\nINFO:tensorflow:global_step/sec: 235.308\nINFO:tensorflow:loss = 0.22976612, step = 4380 (0.494 sec)\nINFO:tensorflow:global_step/sec: 195.692\nINFO:tensorflow:loss = 0.24535024, step = 4480 (0.459 sec)\nINFO:tensorflow:global_step/sec: 228.835\nINFO:tensorflow:loss = 0.45115024, step = 4580 (0.447 sec)\nINFO:tensorflow:global_step/sec: 223.004\nINFO:tensorflow:loss = 0.27290797, step = 4680 (0.448 sec)\nINFO:tensorflow:global_step/sec: 220.322\nINFO:tensorflow:loss = 0.2475199, step = 4780 (0.448 sec)\nINFO:tensorflow:global_step/sec: 225.264\nINFO:tensorflow:loss = 0.23342848, step = 4880 (0.439 sec)\nINFO:tensorflow:global_step/sec: 228.466\nINFO:tensorflow:loss = 0.25287765, step = 4980 (0.436 sec)\nINFO:tensorflow:global_step/sec: 228.304\nINFO:tensorflow:loss = 0.07537734, step = 5080 (0.439 sec)\nINFO:tensorflow:global_step/sec: 227.195\nINFO:tensorflow:loss = 0.20548478, step = 5180 (0.441 sec)\nINFO:tensorflow:global_step/sec: 226.893\nINFO:tensorflow:loss = 0.7532023, step = 5280 (0.518 sec)\nINFO:tensorflow:global_step/sec: 191.774\nINFO:tensorflow:loss = 0.21570265, step = 5380 (0.435 sec)\nINFO:tensorflow:global_step/sec: 233.052\nINFO:tensorflow:loss = 0.24697597, step = 5480 (0.441 sec)\nINFO:tensorflow:global_step/sec: 226.153\nINFO:tensorflow:loss = 0.12125553, step = 5580 (0.440 sec)\nINFO:tensorflow:global_step/sec: 228.699\nINFO:tensorflow:loss = 0.21887329, step = 5680 (0.459 sec)\nINFO:tensorflow:global_step/sec: 217.953\nINFO:tensorflow:loss = 0.12589195, step = 5780 (0.438 sec)\nINFO:tensorflow:global_step/sec: 228.668\nINFO:tensorflow:loss = 0.8719354, step = 5880 (0.442 sec)\nINFO:tensorflow:global_step/sec: 220.02\nINFO:tensorflow:loss = 0.24293149, step = 5980 (0.444 sec)\nINFO:tensorflow:global_step/sec: 225.677\nINFO:tensorflow:loss = 0.197566, step = 6080 (0.463 sec)\nINFO:tensorflow:global_step/sec: 218.978\nINFO:tensorflow:loss = 0.22314307, step = 6180 (0.462 sec)\nINFO:tensorflow:global_step/sec: 219.238\nINFO:tensorflow:loss = 0.16728356, step = 6280 (0.441 sec)\nINFO:tensorflow:global_step/sec: 226.413\nINFO:tensorflow:loss = 0.11892565, step = 6380 (0.449 sec)\nINFO:tensorflow:global_step/sec: 223.561\nINFO:tensorflow:loss = 0.10035148, step = 6480 (0.434 sec)\nINFO:tensorflow:global_step/sec: 227.862\nINFO:tensorflow:loss = 0.24382532, step = 6580 (0.474 sec)\nINFO:tensorflow:global_step/sec: 200.733\nINFO:tensorflow:loss = 0.1128447, step = 6680 (0.480 sec)\nINFO:tensorflow:global_step/sec: 220.81\nINFO:tensorflow:loss = 0.24076247, step = 6780 (0.483 
sec)\nINFO:tensorflow:global_step/sec: 207.813\nINFO:tensorflow:loss = 0.075261444, step = 6880 (0.432 sec)\nINFO:tensorflow:global_step/sec: 230.704\nINFO:tensorflow:loss = 0.05876013, step = 6980 (0.462 sec)\nINFO:tensorflow:global_step/sec: 208.195\nINFO:tensorflow:loss = 0.06491387, step = 7080 (0.510 sec)\nINFO:tensorflow:global_step/sec: 203.482\nINFO:tensorflow:loss = 0.106327154, step = 7180 (0.440 sec)\nINFO:tensorflow:global_step/sec: 223.96\nINFO:tensorflow:loss = 0.12552896, step = 7280 (0.445 sec)\nINFO:tensorflow:global_step/sec: 227.131\nINFO:tensorflow:loss = 0.14993864, step = 7380 (0.438 sec)\nINFO:tensorflow:global_step/sec: 229.826\nINFO:tensorflow:loss = 0.10687789, step = 7480 (0.448 sec)\nINFO:tensorflow:global_step/sec: 222.381\nINFO:tensorflow:loss = 0.099866405, step = 7580 (0.449 sec)\nINFO:tensorflow:global_step/sec: 222.688\nINFO:tensorflow:loss = 0.047058415, step = 7680 (0.487 sec)\nINFO:tensorflow:global_step/sec: 204.875\nINFO:tensorflow:loss = 0.066626415, step = 7780 (0.441 sec)\nINFO:tensorflow:global_step/sec: 223.965\nINFO:tensorflow:loss = 0.07019142, step = 7880 (0.446 sec)\nINFO:tensorflow:global_step/sec: 221.653\nINFO:tensorflow:loss = 0.037252925, step = 7980 (0.464 sec)\nINFO:tensorflow:global_step/sec: 219.262\nINFO:tensorflow:loss = 0.029428132, step = 8080 (0.456 sec)\nINFO:tensorflow:global_step/sec: 220.442\nINFO:tensorflow:loss = 0.07954688, step = 8180 (0.450 sec)\nINFO:tensorflow:global_step/sec: 221.195\nINFO:tensorflow:loss = 0.06761849, step = 8280 (0.459 sec)\nINFO:tensorflow:global_step/sec: 219.408\nINFO:tensorflow:loss = 0.06025981, step = 8380 (0.447 sec)\nINFO:tensorflow:global_step/sec: 223.415\nINFO:tensorflow:loss = 0.06555028, step = 8480 (0.484 sec)\nINFO:tensorflow:global_step/sec: 195.708\nINFO:tensorflow:loss = 0.14992812, step = 8580 (0.485 sec)\nINFO:tensorflow:global_step/sec: 218.308\nINFO:tensorflow:loss = 0.041804157, step = 8680 (0.440 sec)\nINFO:tensorflow:global_step/sec: 226.623\nINFO:tensorflow:loss = 0.08093469, step = 8780 (0.464 sec)\nINFO:tensorflow:global_step/sec: 216.02\nINFO:tensorflow:loss = 0.035671968, step = 8880 (0.445 sec)\nINFO:tensorflow:global_step/sec: 224.868\nINFO:tensorflow:loss = 0.1250574, step = 8980 (0.447 sec)\nINFO:tensorflow:global_step/sec: 218.321\nINFO:tensorflow:loss = 0.06651616, step = 9080 (0.449 sec)\nINFO:tensorflow:global_step/sec: 225.993\nINFO:tensorflow:loss = 0.10398882, step = 9180 (0.450 sec)\nINFO:tensorflow:global_step/sec: 224.348\nINFO:tensorflow:loss = 0.025028639, step = 9280 (0.451 sec)\nINFO:tensorflow:global_step/sec: 217.119\nINFO:tensorflow:loss = 0.012650586, step = 9380 (0.526 sec)\nINFO:tensorflow:global_step/sec: 191.652\nINFO:tensorflow:loss = 0.047671594, step = 9480 (0.462 sec)\nINFO:tensorflow:global_step/sec: 216.839\nINFO:tensorflow:loss = 0.03253608, step = 9580 (0.453 sec)\nINFO:tensorflow:global_step/sec: 212.392\nINFO:tensorflow:loss = 0.026947241, step = 9680 (0.477 sec)\nINFO:tensorflow:global_step/sec: 219.226\nINFO:tensorflow:loss = 0.071996406, step = 9780 (0.450 sec)\nINFO:tensorflow:global_step/sec: 223.221\nINFO:tensorflow:loss = 0.036975406, step = 9880 (0.456 sec)\nINFO:tensorflow:global_step/sec: 218.857\nINFO:tensorflow:loss = 0.0287939, step = 9980 (0.455 sec)\nINFO:tensorflow:global_step/sec: 220.403\nINFO:tensorflow:loss = 0.038904883, step = 10080 (0.477 sec)\nINFO:tensorflow:global_step/sec: 209.447\nINFO:tensorflow:loss = 0.03822598, step = 10180 (0.451 sec)\nINFO:tensorflow:global_step/sec: 219.953\nINFO:tensorflow:loss = 
0.02723059, step = 10280 (0.455 sec)\nINFO:tensorflow:global_step/sec: 216.212\nINFO:tensorflow:loss = 0.024398614, step = 10380 (0.463 sec)\nINFO:tensorflow:global_step/sec: 220.76\nINFO:tensorflow:loss = 0.022796106, step = 10480 (0.452 sec)\nINFO:tensorflow:global_step/sec: 219.346\nINFO:tensorflow:loss = 0.040350664, step = 10580 (0.467 sec)\nINFO:tensorflow:global_step/sec: 214.779\nINFO:tensorflow:loss = 0.032954104, step = 10680 (0.459 sec)\nINFO:tensorflow:global_step/sec: 205.035\nINFO:tensorflow:loss = 0.057137553, step = 10780 (0.544 sec)\nINFO:tensorflow:global_step/sec: 191.955\nINFO:tensorflow:loss = 0.0147186695, step = 10880 (0.505 sec)\nINFO:tensorflow:global_step/sec: 191.07\nINFO:tensorflow:loss = 0.023054967, step = 10980 (0.530 sec)\nINFO:tensorflow:global_step/sec: 195.83\nINFO:tensorflow:loss = 0.048917457, step = 11080 (0.471 sec)\nINFO:tensorflow:global_step/sec: 213.787\nINFO:tensorflow:loss = 0.025292493, step = 11180 (0.509 sec)\nINFO:tensorflow:global_step/sec: 195.252\nINFO:tensorflow:loss = 0.023140596, step = 11280 (0.477 sec)\nINFO:tensorflow:global_step/sec: 210.539\nINFO:tensorflow:loss = 0.009416366, step = 11380 (0.477 sec)\nINFO:tensorflow:global_step/sec: 208.174\nINFO:tensorflow:loss = 0.015295783, step = 11480 (0.471 sec)\nINFO:tensorflow:global_step/sec: 209.569\nINFO:tensorflow:loss = 0.011721921, step = 11580 (0.504 sec)\nINFO:tensorflow:global_step/sec: 192.07\nINFO:tensorflow:loss = 0.017539293, step = 11680 (0.549 sec)\nINFO:tensorflow:global_step/sec: 189.797\nINFO:tensorflow:loss = 0.03581386, step = 11780 (0.470 sec)\nINFO:tensorflow:global_step/sec: 215.889\nINFO:tensorflow:loss = 0.025495213, step = 11880 (0.483 sec)\nINFO:tensorflow:global_step/sec: 204.375\nINFO:tensorflow:loss = 0.019865915, step = 11980 (0.464 sec)\nINFO:tensorflow:global_step/sec: 216.999\nINFO:tensorflow:loss = 0.038710073, step = 12080 (0.464 sec)\nINFO:tensorflow:global_step/sec: 212.916\nINFO:tensorflow:loss = 0.009932896, step = 12180 (0.529 sec)\nINFO:tensorflow:global_step/sec: 182.102\nINFO:tensorflow:loss = 0.026945513, step = 12280 (0.568 sec)\nINFO:tensorflow:global_step/sec: 184.744\nINFO:tensorflow:loss = 0.020113902, step = 12380 (0.465 sec)\nINFO:tensorflow:global_step/sec: 215.823\nINFO:tensorflow:loss = 0.0051452513, step = 12480 (0.458 sec)\nINFO:tensorflow:global_step/sec: 217.934\nINFO:tensorflow:loss = 0.013472352, step = 12580 (0.461 sec)\nINFO:tensorflow:global_step/sec: 213.53\nINFO:tensorflow:loss = 0.0075852657, step = 12680 (0.466 sec)\nINFO:tensorflow:global_step/sec: 216.032\nINFO:tensorflow:loss = 0.01434672, step = 12780 (0.463 sec)\nINFO:tensorflow:global_step/sec: 214.895\nINFO:tensorflow:loss = 0.020459356, step = 12880 (0.464 sec)\nINFO:tensorflow:global_step/sec: 218.047\nINFO:tensorflow:loss = 0.008625217, step = 12980 (0.458 sec)\nINFO:tensorflow:global_step/sec: 219.639\nINFO:tensorflow:loss = 0.017844502, step = 13080 (0.459 sec)\nINFO:tensorflow:global_step/sec: 217.046\nINFO:tensorflow:loss = 0.01284742, step = 13180 (0.486 sec)\nINFO:tensorflow:global_step/sec: 196.313\nINFO:tensorflow:loss = 0.010080893, step = 13280 (0.495 sec)\nINFO:tensorflow:global_step/sec: 211.481\nINFO:tensorflow:loss = 0.023105443, step = 13380 (0.466 sec)\nINFO:tensorflow:global_step/sec: 214.854\nINFO:tensorflow:loss = 0.012591141, step = 13480 (0.465 sec)\nINFO:tensorflow:global_step/sec: 212.892\nINFO:tensorflow:loss = 0.013794397, step = 13580 (0.466 sec)\nINFO:tensorflow:global_step/sec: 216.267\nINFO:tensorflow:loss = 0.01258044, step = 13680 
(0.468 sec)\nINFO:tensorflow:global_step/sec: 214.773\nINFO:tensorflow:loss = 0.010764226, step = 13780 (0.477 sec)\nINFO:tensorflow:global_step/sec: 202.68\nINFO:tensorflow:loss = 0.005942981, step = 13880 (0.476 sec)\nINFO:tensorflow:global_step/sec: 216.921\nINFO:tensorflow:loss = 0.014338085, step = 13980 (0.459 sec)\nINFO:tensorflow:global_step/sec: 212.003\nINFO:tensorflow:loss = 0.019534815, step = 14080 (0.533 sec)\nINFO:tensorflow:global_step/sec: 191.437\nINFO:tensorflow:loss = 0.008095667, step = 14180 (0.458 sec)\nINFO:tensorflow:global_step/sec: 219.02\nINFO:tensorflow:loss = 0.0028836923, step = 14280 (0.458 sec)\nINFO:tensorflow:global_step/sec: 216.513\nINFO:tensorflow:loss = 0.0114330305, step = 14380 (0.464 sec)\nINFO:tensorflow:global_step/sec: 215.953\nINFO:tensorflow:loss = 0.009912422, step = 14480 (0.466 sec)\nINFO:tensorflow:global_step/sec: 215.34\nINFO:tensorflow:loss = 0.0039477334, step = 14580 (0.467 sec)\nINFO:tensorflow:global_step/sec: 212.913\nINFO:tensorflow:loss = 0.015556888, step = 14680 (0.465 sec)\nINFO:tensorflow:global_step/sec: 209.396\nINFO:tensorflow:loss = 0.005233367, step = 14780 (0.482 sec)\nINFO:tensorflow:global_step/sec: 216.004\nINFO:tensorflow:loss = 0.0070141368, step = 14880 (0.529 sec)\nINFO:tensorflow:global_step/sec: 181.032\nINFO:tensorflow:loss = 0.0130175855, step = 14980 (0.486 sec)\nINFO:tensorflow:global_step/sec: 214.466\nINFO:tensorflow:loss = 0.0047422783, step = 15080 (0.473 sec)\nINFO:tensorflow:global_step/sec: 211.951\nINFO:tensorflow:loss = 0.0061913, step = 15180 (0.470 sec)\nINFO:tensorflow:global_step/sec: 212.316\nINFO:tensorflow:loss = 0.004428529, step = 15280 (0.467 sec)\nINFO:tensorflow:global_step/sec: 211.761\nINFO:tensorflow:loss = 0.007156968, step = 15380 (0.472 sec)\nINFO:tensorflow:global_step/sec: 213.038\nINFO:tensorflow:loss = 0.0044437535, step = 15480 (0.504 sec)\nINFO:tensorflow:global_step/sec: 190.879\nINFO:tensorflow:loss = 0.004695491, step = 15580 (0.509 sec)\nINFO:tensorflow:global_step/sec: 203.367\nINFO:tensorflow:loss = 0.0025094748, step = 15680 (0.518 sec)\nINFO:tensorflow:global_step/sec: 194.819\nINFO:tensorflow:loss = 0.00094408746, step = 15780 (0.475 sec)\nINFO:tensorflow:global_step/sec: 212.161\nINFO:tensorflow:loss = 0.012425633, step = 15880 (0.468 sec)\nINFO:tensorflow:global_step/sec: 213.532\nINFO:tensorflow:loss = 0.0042187907, step = 15980 (0.487 sec)\nINFO:tensorflow:global_step/sec: 204.339\nINFO:tensorflow:loss = 0.0037577068, step = 16080 (0.464 sec)\nINFO:tensorflow:global_step/sec: 214.958\nINFO:tensorflow:loss = 0.0062155034, step = 16180 (0.481 sec)\nINFO:tensorflow:global_step/sec: 206.566\nINFO:tensorflow:loss = 0.0022613448, step = 16280 (0.468 sec)\nINFO:tensorflow:global_step/sec: 213.882\nINFO:tensorflow:loss = 0.0028099597, step = 16380 (0.473 sec)\nINFO:tensorflow:global_step/sec: 210.176\nINFO:tensorflow:loss = 0.004106181, step = 16480 (0.478 sec)\nINFO:tensorflow:global_step/sec: 211.633\nINFO:tensorflow:loss = 0.0033143421, step = 16580 (0.474 sec)\nINFO:tensorflow:global_step/sec: 211.786\nINFO:tensorflow:loss = 0.0035097834, step = 16680 (0.481 sec)\nINFO:tensorflow:global_step/sec: 207.87\nINFO:tensorflow:loss = 0.0027867071, step = 16780 (0.463 sec)\nINFO:tensorflow:global_step/sec: 213.845\nINFO:tensorflow:loss = 0.009324459, step = 16880 (0.473 sec)\nINFO:tensorflow:global_step/sec: 211.755\nINFO:tensorflow:loss = 0.0021615229, step = 16980 (0.472 sec)\nINFO:tensorflow:global_step/sec: 212.245\nINFO:tensorflow:loss = 0.0048076506, step = 17080 
(0.478 sec)\nINFO:tensorflow:global_step/sec: 208.963\nINFO:tensorflow:loss = 0.0018272446, step = 17180 (0.469 sec)\nINFO:tensorflow:global_step/sec: 215.184\nINFO:tensorflow:loss = 0.002462379, step = 17280 (0.482 sec)\nINFO:tensorflow:global_step/sec: 205.925\nINFO:tensorflow:loss = 0.0006275628, step = 17380 (0.469 sec)\nINFO:tensorflow:global_step/sec: 211.733\nINFO:tensorflow:loss = 0.002109193, step = 17480 (0.475 sec)\nINFO:tensorflow:global_step/sec: 211.295\nINFO:tensorflow:loss = 0.0029382277, step = 17580 (0.477 sec)\nINFO:tensorflow:global_step/sec: 209.936\nINFO:tensorflow:loss = 0.0032096568, step = 17680 (0.486 sec)\nINFO:tensorflow:global_step/sec: 207.686\nINFO:tensorflow:loss = 0.002996812, step = 17780 (0.481 sec)\nINFO:tensorflow:global_step/sec: 208.084\nINFO:tensorflow:loss = 0.0027301726, step = 17880 (0.487 sec)\nINFO:tensorflow:global_step/sec: 203.467\nINFO:tensorflow:loss = 0.0016131198, step = 17980 (0.491 sec)\nINFO:tensorflow:global_step/sec: 194.249\nINFO:tensorflow:loss = 0.0071048774, step = 18080 (0.576 sec)\nINFO:tensorflow:global_step/sec: 175.652\nINFO:tensorflow:loss = 0.0023194004, step = 18180 (0.542 sec)\nINFO:tensorflow:global_step/sec: 191.984\nINFO:tensorflow:loss = 0.0015120232, step = 18280 (0.501 sec)\nINFO:tensorflow:global_step/sec: 194.325\nINFO:tensorflow:loss = 0.0016394173, step = 18380 (0.499 sec)\nINFO:tensorflow:global_step/sec: 194.902\nINFO:tensorflow:loss = 0.0007376091, step = 18480 (0.546 sec)\nINFO:tensorflow:global_step/sec: 184.887\nINFO:tensorflow:loss = 0.0028751981, step = 18580 (0.508 sec)\nINFO:tensorflow:global_step/sec: 204.618\nINFO:tensorflow:loss = 0.0008021246, step = 18680 (0.487 sec)\nINFO:tensorflow:global_step/sec: 206.998\nINFO:tensorflow:loss = 0.002925751, step = 18780 (0.474 sec)\nINFO:tensorflow:global_step/sec: 210.391\nINFO:tensorflow:loss = 0.0020086821, step = 18880 (0.479 sec)\nINFO:tensorflow:global_step/sec: 208.779\nINFO:tensorflow:loss = 0.0009860102, step = 18980 (0.476 sec)\nINFO:tensorflow:global_step/sec: 210.898\nINFO:tensorflow:loss = 0.0012985889, step = 19080 (0.477 sec)\nINFO:tensorflow:global_step/sec: 207.897\nINFO:tensorflow:loss = 0.0012460706, step = 19180 (0.526 sec)\nINFO:tensorflow:global_step/sec: 182.58\nINFO:tensorflow:loss = 0.0013941245, step = 19280 (0.500 sec)\nINFO:tensorflow:global_step/sec: 209.864\nINFO:tensorflow:loss = 0.0017754486, step = 19380 (0.482 sec)\nINFO:tensorflow:global_step/sec: 207.462\nINFO:tensorflow:loss = 0.0007509034, step = 19480 (0.482 sec)\nINFO:tensorflow:global_step/sec: 203.189\nINFO:tensorflow:loss = 0.0013608203, step = 19580 (0.520 sec)\nINFO:tensorflow:global_step/sec: 188.442\nINFO:tensorflow:loss = 0.001058562, step = 19680 (0.502 sec)\nINFO:tensorflow:global_step/sec: 204.703\nINFO:tensorflow:loss = 0.0034424188, step = 19780 (0.518 sec)\nINFO:tensorflow:global_step/sec: 194.015\nINFO:tensorflow:loss = 0.0008957273, step = 19880 (0.475 sec)\nINFO:tensorflow:global_step/sec: 207.493\nINFO:tensorflow:loss = 0.002313973, step = 19980 (0.489 sec)\nINFO:tensorflow:global_step/sec: 208.756\nINFO:tensorflow:loss = 0.000694511, step = 20080 (0.484 sec)\nINFO:tensorflow:global_step/sec: 203.375\nINFO:tensorflow:loss = 0.0006695612, step = 20180 (0.492 sec)\nINFO:tensorflow:global_step/sec: 206.74\nINFO:tensorflow:loss = 0.0014117493, step = 20280 (0.475 sec)\nINFO:tensorflow:global_step/sec: 202.108\nINFO:tensorflow:loss = 0.0011933774, step = 20380 (0.502 sec)\nINFO:tensorflow:global_step/sec: 206.154\nINFO:tensorflow:loss = 0.0008137824, step = 
20480 (0.472 sec)\nINFO:tensorflow:global_step/sec: 210.589\nINFO:tensorflow:loss = 0.0009201659, step = 20580 (0.525 sec)\nINFO:tensorflow:global_step/sec: 190.006\nINFO:tensorflow:loss = 0.00044394887, step = 20680 (0.481 sec)\nINFO:tensorflow:global_step/sec: 209.716\nINFO:tensorflow:loss = 0.0010550698, step = 20780 (0.501 sec)\nINFO:tensorflow:global_step/sec: 200.574\nINFO:tensorflow:loss = 0.00019395063, step = 20880 (0.479 sec)\nINFO:tensorflow:global_step/sec: 209.267\nINFO:tensorflow:loss = 0.0012454216, step = 20980 (0.500 sec)\nINFO:tensorflow:global_step/sec: 198.726\nINFO:tensorflow:loss = 0.0010517604, step = 21080 (0.472 sec)\nINFO:tensorflow:global_step/sec: 212.335\nINFO:tensorflow:loss = 0.0003900857, step = 21180 (0.490 sec)\nINFO:tensorflow:global_step/sec: 203.871\nINFO:tensorflow:loss = 0.0013436541, step = 21280 (0.482 sec)\nINFO:tensorflow:global_step/sec: 208.191\nINFO:tensorflow:loss = 0.00020852721, step = 21380 (0.523 sec)\nINFO:tensorflow:global_step/sec: 188.887\nINFO:tensorflow:loss = 0.00048694198, step = 21480 (0.486 sec)\nINFO:tensorflow:global_step/sec: 206.18\nINFO:tensorflow:loss = 0.00073513493, step = 21580 (0.502 sec)\nINFO:tensorflow:global_step/sec: 196.849\nINFO:tensorflow:loss = 0.00039215965, step = 21680 (0.486 sec)\nINFO:tensorflow:global_step/sec: 207.478\nINFO:tensorflow:loss = 0.00014613547, step = 21780 (0.497 sec)\nINFO:tensorflow:global_step/sec: 203.14\nINFO:tensorflow:loss = 0.00015599697, step = 21880 (0.479 sec)\nINFO:tensorflow:global_step/sec: 207.261\nINFO:tensorflow:loss = 0.00063628936, step = 21980 (0.496 sec)\nINFO:tensorflow:global_step/sec: 192.981\nINFO:tensorflow:loss = 0.00072673126, step = 22080 (0.569 sec)\nINFO:tensorflow:global_step/sec: 184.533\nINFO:tensorflow:loss = 0.00042106156, step = 22180 (0.490 sec)\nINFO:tensorflow:global_step/sec: 202.957\nINFO:tensorflow:loss = 0.00062714494, step = 22280 (0.479 sec)\nINFO:tensorflow:global_step/sec: 209.223\nINFO:tensorflow:loss = 0.0011216395, step = 22380 (0.494 sec)\nINFO:tensorflow:global_step/sec: 200.908\nINFO:tensorflow:loss = 0.00027068384, step = 22480 (0.502 sec)\nINFO:tensorflow:global_step/sec: 190.631\nINFO:tensorflow:loss = 0.000450986, step = 22580 (0.597 sec)\nINFO:tensorflow:global_step/sec: 176.392\nINFO:tensorflow:loss = 0.00010583318, step = 22680 (0.483 sec)\nINFO:tensorflow:global_step/sec: 206.564\nINFO:tensorflow:loss = 0.0010061075, step = 22780 (0.491 sec)\nINFO:tensorflow:global_step/sec: 200.9\nINFO:tensorflow:loss = 0.00017677023, step = 22880 (0.508 sec)\nINFO:tensorflow:global_step/sec: 191.064\nINFO:tensorflow:loss = 0.00046004873, step = 22980 (0.558 sec)\nINFO:tensorflow:global_step/sec: 183.679\nINFO:tensorflow:loss = 0.00043012408, step = 23080 (0.493 sec)\nINFO:tensorflow:global_step/sec: 205.708\nINFO:tensorflow:loss = 0.00112921, step = 23180 (0.492 sec)\nINFO:tensorflow:global_step/sec: 204.122\nINFO:tensorflow:loss = 0.00034221812, step = 23280 (0.480 sec)\nINFO:tensorflow:global_step/sec: 208.715\nINFO:tensorflow:loss = 0.00030490258, step = 23380 (0.489 sec)\nINFO:tensorflow:global_step/sec: 202.505\nINFO:tensorflow:loss = 0.00030782583, step = 23480 (0.494 sec)\nINFO:tensorflow:global_step/sec: 203.755\nINFO:tensorflow:loss = 0.0006354905, step = 23580 (0.521 sec)\nINFO:tensorflow:global_step/sec: 190.195\nINFO:tensorflow:loss = 0.00061701454, step = 23680 (0.485 sec)\nINFO:tensorflow:global_step/sec: 206.651\nINFO:tensorflow:loss = 0.00044614554, step = 23780 (0.532 sec)\nINFO:tensorflow:global_step/sec: 
188.084\nINFO:tensorflow:loss = 0.00011296691, step = 23880 (0.493 sec)\nINFO:tensorflow:global_step/sec: 198.27\nINFO:tensorflow:loss = 0.0002558846, step = 23980 (0.529 sec)\nINFO:tensorflow:global_step/sec: 191.401\nINFO:tensorflow:Calling checkpoint listeners before saving checkpoint 24000...\nINFO:tensorflow:Saving checkpoints for 24000 into /tmp/tmpe71wdd8q/model.ckpt.\nWARNING:tensorflow:Issue encountered when serializing resources.\nType is unsupported, or the types of the items don't match field type in CollectionDef. Note this is a warning and probably safe to ignore.\n'_Resource' object has no attribute 'name'\nINFO:tensorflow:Calling checkpoint listeners after saving checkpoint 24000...\nINFO:tensorflow:Loss for final step: 0.00039750503.\nINFO:tensorflow:Calling model_fn.\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Starting evaluation at 2021-01-02T15:57:43Z\nINFO:tensorflow:Graph was finalized.\nINFO:tensorflow:Restoring parameters from /tmp/tmpe71wdd8q/model.ckpt-24000\nINFO:tensorflow:Running local_init_op.\nINFO:tensorflow:Done running local_init_op.\nINFO:tensorflow:Inference Time : 0.23159s\nINFO:tensorflow:Finished evaluation at 2021-01-02-15:57:44\nINFO:tensorflow:Saving dict for global step 24000: average_loss = 12.793907, global_step = 24000, label/mean = 23.611391, loss = 12.713541, prediction/mean = 22.508915\nWARNING:tensorflow:Issue encountered when serializing resources.\nType is unsupported, or the types of the items don't match field type in CollectionDef. Note this is a warning and probably safe to ignore.\n'_Resource' object has no attribute 'name'\nINFO:tensorflow:Saving 'checkpoint_path' summary for global step 24000: /tmp/tmpe71wdd8q/model.ckpt-24000\n{'average_loss': 12.793907, 'label/mean': 23.611391, 'loss': 12.713541, 'prediction/mean': 22.508915, 'global_step': 24000}\n평균 손실 12.7939\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ] ]
cbff5bf6513524a3a2b631561d0db44e17ebe535
140,328
ipynb
Jupyter Notebook
examples/.ipynb_checkpoints/contrasts_example-checkpoint.ipynb
alexblnn/pyrft
9087715614ece386a758fadffdf85680e4ec735d
[ "MIT" ]
null
null
null
examples/.ipynb_checkpoints/contrasts_example-checkpoint.ipynb
alexblnn/pyrft
9087715614ece386a758fadffdf85680e4ec735d
[ "MIT" ]
null
null
null
examples/.ipynb_checkpoints/contrasts_example-checkpoint.ipynb
alexblnn/pyrft
9087715614ece386a758fadffdf85680e4ec735d
[ "MIT" ]
null
null
null
237.040541
41,928
0.916717
[ [ [ "# Posthoc Inference on Contrasts\n\nIn this notebook, we provide examples of how to run posthoc inference to infer on contrasts in the linear model.", "_____no_output_____" ], [ "## Set Up", "_____no_output_____" ], [ "#### Import the required python packages.", "_____no_output_____" ] ], [ [ "import numpy as np\nimport numpy.matlib as npm\nimport matplotlib.pyplot as plt \n\nimport sanssouci as ss\n\nimport pyrft as pr", "/home/thirion/mygit/nilearn/nilearn/datasets/__init__.py:92: FutureWarning: Fetchers from the nilearn.datasets module will be updated in version 0.9 to return python strings instead of bytes and Pandas dataframes instead of Numpy arrays.\n warn(\"Fetchers from the nilearn.datasets module will be \"\n" ] ], [ [ "#### Initialize the example", "_____no_output_____" ] ], [ [ "# Set the dimension of the example and the number of subjects\nDim = (50,50)\nN = 100\nm = np.prod(Dim)\n\n# Generate the category vector and obtain the corresponding design matrix\nfrom sklearn.utils import check_random_state\nrng = check_random_state(101)\ncateg = rng.choice(3, N, replace = True)\nX = pr.group_design(categ)\n\n# Specify the contrast matrix (here 2 contrasts are chosen)\nC = np.array([[1, -1, 0], [0, 1, -1]])\n\n# Calulate the number contrasts\nL = C.shape[0]\n\n# Calculate the number of p-values generated (L for each voxels)\nnpvals = m * L\n\n# Generate a white noise field\nlat_data = pr.wfield(Dim, N)\n\n# Generate a stationary random field with given FWHM\n# FWHM = 4; lat_data = pr.statnoise(Dim, N, FWHM)\n\n# Plot a sample realization of the noise\nplt.imshow(lat_data.field[:, :, 1]) ", "_____no_output_____" ] ], [ [ "### Add signal to the field", "_____no_output_____" ] ], [ [ "# Obtain the locations where the category is 2\nw2 = np.where(categ==2)[0]\n\n# Initialize the spatial signal\npi0 = 0.9 # proportion of noise (true null hypotheses)\np0 = int(np.round(pi0 * m))\nsignal = np.zeros(m)\nsignal[(p0 + 1): m] = 1\nsignal = signal.reshape(Dim)\n\n# Add the signal to the field\nfor I in np.arange(len(w2)):\n lat_data.field[:, :, w2[I]] += signal\n\n# Convert the signal to boolean to determine whether the true signal is\nbool_signal = np.zeros(Dim + (L,)) == 0\nbool_signal[:, :, 1] = signal > 0 \n\n# Plot the locaion locations for illustration\nplt.imshow(signal)", "_____no_output_____" ] ], [ [ "## Posthoc Inference", "_____no_output_____" ], [ "### Bootstrapping the Data", "_____no_output_____" ], [ "Bootstrapping is performed using the residuals of the linear model. This gives test-statistics that have the same asymptotic distribution as the limiting test-statistic (under the null). See Eck 2017 and Freedman 1981 for further details. 
In our context we use these to obtain bootstrapped pivotal statistics which allow us to obtain asymptotic JER control.", "_____no_output_____" ] ], [ [ "# Specify the number of bootstraps to use\nB = 100\n\n# Choose the template to use (by default the linear template is chosen)\ntemplate = 'linear'\n\n# Run the bootstrapped algorithm\nminPperm, orig_pvalues, pivotal_stats, bs = pr.boot_contrasts(lat_data, X, C, B, template, True, 1)", "(5000, 100)\n(1, 5000)\n" ], [ "import matplotlib.pyplot as plt \n\nplt.hist(minPperm)", "_____no_output_____" ] ], [ [ "### Plotting the p-values", "_____no_output_____" ] ], [ [ "pval_sort_idx = np.argsort(np.ravel(orig_pvalues.field))\npvals = np.ravel(orig_pvalues.field)[pval_sort_idx]\n\nfigure, axes = plt.subplots(nrows=1, ncols=2) \nplt.subplot(121)\nplt.hist(np.ravel(orig_pvalues.field), 100)\nplt.title('Histogram of the p-values')\nplt.ylabel('Counts')\n\nplt.subplot(122)\nplt.plot(pvals[:np.min([1000, npvals])])\nplt.title('Smallest 1000 p-values')\nplt.xlabel('k')\nplt.ylabel('p_{(k)}')\n\nfigure.tight_layout(pad=3.0)", "_____no_output_____" ] ], [ [ "### Lambda Calibration", "_____no_output_____" ], [ "Using the bootstrapped pivotal_stats that we have calculated we can choose a value lambda that is the (alpha)% quantile (for some 0 < alpha < 1) of the distribution in order to provide asymptotic JER control at a level alpha.", "_____no_output_____" ] ], [ [ "# Choose the confidence level\nalpha = 0.1\n\n# Obtain the lambda calibration\nlambda_quant = np.quantile(pivotal_stats, alpha)\nprint('Lambda Quantile:', lambda_quant)\n\n# Calculate the number of voxels in the mask\nm = np.sum(lat_data.mask)\n\n# Gives t_k^L(lambda) = lambda*k/m for k = 1, ..., m\nthr = ss.t_linear(lambda_quant, np.arange(1, m + 1), m)", "Lambda Quantile: 0.19733569497293324\n" ] ], [ [ "### PostHoc Bound", "_____no_output_____" ], [ "For a chosen subset of voxels, provide a bound on the number of true null hypotheses within that subset. ", "_____no_output_____" ] ], [ [ "# Get the first 10 pvalues (or any subset of the p-values)\nsubset_pvals = np.sort(np.ravel(orig_pvalues.field))[:10]\n\n# Compute an upper bound on the number of null hypotheses\nbound = ss.max_fp(subset_pvals, thr)\nprint('FP Upper Bound on subset:', bound)", "FP Upper Bound on subset: 0.0\n" ] ], [ [ "### Confidence Envelopes", "_____no_output_____" ] ], [ [ "# These are the confidence envelopes. I.e. for i = 1:npvals, max_FP[i-1] is the upper bound on the number of \n# false positives that occur within the set [p[0], \\dots, p[i-1]] if you were to reject all elements of that set.\nmax_FP = ss.curve_max_fp(subset_pvals, thr) # Confidence envelope on the chosen subset\nprint(max_FP)\nmax_FP = ss.curve_max_fp(pvals, thr) # Confidence envelope on all of them\nprint(max_FP[0: 200])", "[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]\n[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0. 0. 0. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 2.\n 2. 2. 2. 2. 2. 2. 2. 2. 2. 2. 2. 3. 3. 3. 3. 3. 3. 3.\n 3. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 5. 5. 5. 5. 5. 6. 6.\n 6. 6. 6. 7. 7. 7. 7. 7. 8. 8. 8. 8. 9. 9. 9. 9. 9. 9.\n 10. 10. 10. 10. 11. 11. 11. 12. 13. 13. 13. 14. 14. 14. 14. 15. 15. 15.\n 16. 16. 17. 17. 17. 17. 18. 18. 19. 19. 20. 20. 20. 21. 21. 22. 23. 24.\n 25. 
26.]\n" ] ], [ [ "#### Plot the FPR and TP curve bounds", "_____no_output_____" ] ], [ [ "# Generate the vector [0,...,npvals]\none2npvals = np.arange(1, npvals + 1)\n\n# Choose the number of p-values (always the smallest ones first) to plot\nlowestnumber = 1000\n\n# Ensure that selected number is not greater than the total number of p-values\nlowestnumber = np.min([lowestnumber, npvals])\n\n# Dividing the envelope by the number of elements in the set gives a bound on the false discovery proportion\nmax_FDP = max_FP[0: lowestnumber] / one2npvals[0: lowestnumber] \nmin_TP = one2npvals[0: lowestnumber] - max_FP[0: lowestnumber]\n\n# Calculate the truth (to determine if it is correctly bounded!)\nsorted_signal = np.ravel(bool_signal)[pval_sort_idx]\nTP = np.zeros(lowestnumber)\nfor I in np.arange(lowestnumber):\n TP[I] = np.sum(sorted_signal[0: I + 1])\n \n# Calculate the true FDP for each subset\nFP = np.zeros(lowestnumber)\nfor I in np.arange(lowestnumber):\n FP[I] = np.sum(abs(sorted_signal[0: I + 1] - 1))\ntrue_FDP = FP / one2npvals[0: lowestnumber] \n \n# Initialize the figure\nfigure = plt.figure(figsize=(10, 4))\n\n# Plot the false discovery proportion and its bound\nplt.subplot(121)\nplt.plot(max_FDP, label='FDP bound')\nplt.plot(true_FDP, label='True FDP')\nplt.title('Upper bound on FDP amongst smallest p-values')\nplt.xlim(1, lowestnumber)\nplt.xlabel('k')\nplt.ylabel('FDP(p_{(1)}, \\dots, p_{(k)}')\nplt.legend(loc=\"upper right\")\n\n# Plot the true postives and their bound\nplt.subplot(122)\nplt.plot(min_TP, label='TP bound')\nplt.plot(TP, label='True Positives')\nplt.title('Lower bound on TP')\nplt.legend(loc=\"upper right\")\nplt.xlim(1, lowestnumber)\nplt.xlabel('k')\nplt.ylabel('TP(p_{(1)}, \\dots, p_{(k)}')\n#figure, axes = plt.subplots(nrows=1, ncols=2) \nfigure.tight_layout(pad=1.0)", "_____no_output_____" ] ], [ [ "As can be seen we obtain an upper bound on the false discovery proportion and a lower bound on the number of true positives with each set. Note that this bound is valid 95% of the time.", "_____no_output_____" ], [ "### Bootstrap paths", "_____no_output_____" ] ], [ [ "for b in np.arange(B):\n plt.plot(bs[:, b], color=\"blue\")\n \n# Calculate reference families\nt_k, _ = pr.t_ref(template)\nm = bs.shape[0]\nlamb = np.arange(11) / 10\nprint(lamb)\nprint(m)\nk = np.arange(m + 1)\n\nfor l in np.arange(len(lamb)):\n plt.plot(t_k(lamb[l], k, m),color=\"black\")\n \nplt.xlim(1, m)\nplt.xlabel('k')\nplt.ylabel('p_{b,(k)}')\nplt.title('Plotting the ordered p-values for each bootstrap')", "[0. 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1. ]\n5000\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ] ]
cbff5d7a0b5efc64a19619dae315e24b6fb23e62
5,861
ipynb
Jupyter Notebook
Python/00_Setup.ipynb
philipmac/SimpleITK-Notebooks
37ce798b457e24f7108f7e7bed63b3c72aedc55c
[ "Apache-2.0" ]
2
2020-10-19T13:17:45.000Z
2020-10-19T13:17:50.000Z
Python/00_Setup.ipynb
philipmac/SimpleITK-Notebooks
37ce798b457e24f7108f7e7bed63b3c72aedc55c
[ "Apache-2.0" ]
null
null
null
Python/00_Setup.ipynb
philipmac/SimpleITK-Notebooks
37ce798b457e24f7108f7e7bed63b3c72aedc55c
[ "Apache-2.0" ]
2
2019-06-21T13:41:48.000Z
2019-06-21T13:42:41.000Z
33.112994
308
0.600751
[ [ [ "<h1 align=\"center\">Welcome to SimpleITK Jupyter Notebooks</h1>\n\n\n## Newcomers to Jupyter Notebooks:\n1. We use two types of cells, code and markdown.\n2. To run a code cell, select it (mouse or arrow key so that it is highlighted) and then press shift+enter which also moves focus to the next cell or ctrl+enter which doesn't.\n3. Closing the browser window does not close the Jupyter server. To close the server, go to the terminal where you ran it and press ctrl+c twice.\n\nFor additional details see the [Jupyter Notebook Quick Start Guide](https://jupyter-notebook-beginner-guide.readthedocs.io/en/latest/index.html).\n\n## SimpleITK Environment Setup\n\nCheck that SimpleITK and auxiliary program(s) are correctly installed in your environment, and that you have the SimpleITK version which you expect (<b>requires network connectivity</b>).\n\nYou can optionally download all of the data used in the notebooks in advance. This step is only necessary if you expect to run the notebooks without network connectivity.", "_____no_output_____" ], [ "The following cell checks that all expected packages are installed.", "_____no_output_____" ] ], [ [ "from __future__ import print_function\nimport importlib\nfrom distutils.version import LooseVersion\n\n# check that all packages are installed (see requirements.txt file)\nrequired_packages = {'jupyter', \n 'numpy',\n 'matplotlib',\n 'ipywidgets',\n 'scipy',\n 'pandas',\n 'SimpleITK'\n }\n\nproblem_packages = list()\n# Iterate over the required packages: If the package is not installed\n# ignore the exception. \nfor package in required_packages:\n try:\n p = importlib.import_module(package) \n except ImportError:\n problem_packages.append(package)\n \nif len(problem_packages) is 0:\n print('All is well.')\nelse:\n print('The following packages are required but not installed: ' \\\n + ', '.join(problem_packages))", "_____no_output_____" ], [ "import SimpleITK as sitk\n\n%run update_path_to_download_script\nfrom downloaddata import fetch_data, fetch_data_all\n\nfrom ipywidgets import interact\n\nprint(sitk.Version())", "_____no_output_____" ] ], [ [ "We expect that you have an external image viewer installed. The default viewer is <a href=\"https://fiji.sc/#download\">Fiji</a>. If you have another viewer (i.e. ITK-SNAP or 3D Slicer) you will need to set an environment variable to point to it. This can be done from within a notebook as shown below.", "_____no_output_____" ] ], [ [ "# Uncomment the line below to change the default external viewer to your viewer of choice and test that it works.\n#%env SITK_SHOW_COMMAND /Applications/ITK-SNAP.app/Contents/MacOS/ITK-SNAP \n\n# Retrieve an image from the network, read it and display using the external viewer. \n# The show method will also set the display window's title and by setting debugOn to True, \n# will also print information with respect to the command it is attempting to invoke.\n# NOTE: The debug information is printed to the terminal from which you launched the notebook\n# server.\nsitk.Show(sitk.ReadImage(fetch_data(\"SimpleITK.jpg\")), \"SimpleITK Logo\", debugOn=True)", "_____no_output_____" ] ], [ [ "Now we check that the ipywidgets will display correctly. 
When you run the following cell, you should see a slider.\n\nIf you don't see a slider, please shut down the Jupyter server (at the command line prompt press Control-c twice) and then run the following command:\n\n```jupyter nbextension enable --py --sys-prefix widgetsnbextension```", "_____no_output_____" ] ], [ [ "interact(lambda x: x, x=(0,10));", "_____no_output_____" ] ], [ [ "Download all of the data in advance if you expect to be working offline (may take a couple of minutes).", "_____no_output_____" ] ], [ [ "fetch_data_all(os.path.join('..','Data'), os.path.join('..','Data','manifest.json'))", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
cbff656309ec4a913e77705dba51f56fbe633466
546,192
ipynb
Jupyter Notebook
TSG.ipynb
PranY/Passion-Projects
0950c2043ef66406668ba55a5f1697fff06f0809
[ "MIT" ]
2
2019-03-19T21:12:27.000Z
2019-06-27T05:40:46.000Z
TSG.ipynb
PranY/FastAI_projects
0950c2043ef66406668ba55a5f1697fff06f0809
[ "MIT" ]
1
2018-09-04T22:10:11.000Z
2018-09-04T22:10:11.000Z
TSG.ipynb
PranY/Passion-Projects
0950c2043ef66406668ba55a5f1697fff06f0809
[ "MIT" ]
null
null
null
392.943885
278,990
0.90773
[ [ [ "[View in Colaboratory](https://colab.research.google.com/github/PranY/FastAI_projects/blob/master/TSG.ipynb)", "_____no_output_____" ] ], [ [ "!pip install fastai", "_____no_output_____" ], [ "!pip install torch_nightly -f https://download.pytorch.org/whl/nightly/cu92/torch_nightly.html", "_____no_output_____" ], [ "! pip install kaggle\n", "_____no_output_____" ], [ "! pip install tqdm", "Requirement already satisfied: tqdm in /usr/local/lib/python3.6/dist-packages (4.27.0)\n" ], [ "from google.colab import drive\ndrive.mount('/content/drive')", "_____no_output_____" ], [ "! ls \"drive/My Drive\"", "_____no_output_____" ], [ "! cp drive/My\\ Drive/kaggle.json ~/.kaggle/", "_____no_output_____" ], [ "! kaggle competitions download -c tgs-salt-identification-challenge", "_____no_output_____" ], [ "! python -c 'import fastai; print(fastai.__version__)'\n! python -c 'import fastai; fastai.show_install(0)'", "1.0.7\n\n\n```text\n=== Software === \npython version : 3.6.6\nfastai version : 1.0.7\ntorch version : 1.0.0.dev20181019\nnvidia driver : 396.44\ntorch cuda ver : 9.2.148\ntorch cuda is : available\ntorch cudnn ver : 7104\ntorch cudnn is : enabled\n\n=== Hardware === \nnvidia gpus : 1\ntorch available : 1\n - gpu0 : 11441MB | Tesla K80\n\n=== Environment === \nplatform : Linux-4.14.65+-x86_64-with-Ubuntu-18.04-bionic\ndistro : #1 SMP Sun Sep 9 02:18:33 PDT 2018\nconda env : Unknown\npython : /usr/bin/python3\nsys.path : \n/env/python\n/usr/lib/python36.zip\n/usr/lib/python3.6\n/usr/lib/python3.6/lib-dynload\n/usr/local/lib/python3.6/dist-packages\n/usr/lib/python3/dist-packages\n/usr/local/lib/python3.6/dist-packages/IPython/extensions\n```\n\nPlease make sure to include opening/closing ``` when you paste into forums/github to make the reports appear formatted as code sections.\n\nOptional package(s) to enhance the diagnostics can be installed with:\npip install distro\nOnce installed, re-run this utility to get the additional information\n" ], [ "!ls", "depths.csv sample_data\t\t test.zip train.zip\ndrive\t sample_submission.csv train.csv\n" ], [ "# ! rm -r train/\n# !rm -r test/", "_____no_output_____" ], [ "! mkdir train\n! mkdir test", "_____no_output_____" ], [ "! unzip train.zip -d train", "_____no_output_____" ], [ "! unzip test.zip -d test", "_____no_output_____" ], [ "! ls train/images | wc -l\n! ls train/masks | wc -l\n! 
ls test/images | wc -l", "4000\n4000\n18000\n" ], [ "%matplotlib inline\n%reload_ext autoreload\n%autoreload 2", "_____no_output_____" ], [ "from tqdm import tqdm_notebook", "_____no_output_____" ], [ "from fastai import *\nfrom fastai.vision import *\n#from fastai.docs import *\nimport PIL", "_____no_output_____" ], [ "# Loading of training/testing ids and depths\ntrain_df = pd.read_csv(\"train.csv\", index_col=\"id\", usecols=[0])\ndepths_df = pd.read_csv(\"depths.csv\", index_col=\"id\")\ntrain_df = train_df.join(depths_df)\ntest_df = depths_df[~depths_df.index.isin(train_df.index)]\nnum_workers=0\nlen(train_df)\n", "_____no_output_____" ], [ "PATH_X = Path('train/images')\nPATH_Y = Path('train/masks')", "_____no_output_____" ], [ "# def resize2d(fn:PathOrStr, sz) -> Image:\n# img = PIL.Image.open(fn)\n# img = img.resize((sz,sz), PIL.Image.BILINEAR)\n# img.save(fn)", "_____no_output_____" ], [ "# for l in list(PATH_X.iterdir()):\n# resize2d(l,128)", "_____no_output_____" ], [ "# for l in list(PATH_Y.iterdir()):\n# resize2d(l,128)", "_____no_output_____" ], [ "# Reducing mask images to {0,1}\n\ndef FormatMask(fn:PathOrStr) -> Image:\n\n img = PIL.Image.open(fn).convert('L')\n\n # Let numpy do the heavy lifting for converting pixels to pure black or white\n bw = np.asarray(img).copy()\n\n # Pixel range is 0...255, 256/2 = 128\n bw[bw < 128] = 0 # Black\n bw[bw >= 128] = 1 # White\n\n # Now we put it back in Pillow/PIL land\n imfile = PIL.Image.fromarray(bw)\n imfile.save(fn)", "_____no_output_____" ], [ "for l in list(PATH_Y.iterdir()):\n FormatMask(l)", "_____no_output_____" ], [ "class ImageMask(Image):\n \"Class for image segmentation target.\"\n def lighting(self, func:LightingFunc, *args:Any, **kwargs:Any)->'Image': return self\n\n def refresh(self):\n self.sample_kwargs['mode'] = 'bilinear'\n return super().refresh()\n\n @property\n def data(self)->TensorImage:\n \"Return this image pixels as a `LongTensor`.\"\n return self.px.long()\n \n def show(self, ax:plt.Axes=None, figsize:tuple=(3,3), title:Optional[str]=None, hide_axis:bool=True, \n cmap:str='viridis', alpha:float=0.5):\n ax = _show_image(self, ax=ax, hide_axis=hide_axis, cmap=cmap, figsize=figsize, alpha=alpha)\n if title: ax.set_title(title)\n \ndef open_mask(fn:PathOrStr)->ImageMask:\n \"Return `ImageMask` object create from mask in file `fn`.\"\n x = PIL.Image.open(fn).convert('L')\n return ImageMask(pil2tensor(x).float().div_(255))\n \ndef _show_image(img:Image, ax:plt.Axes=None, figsize:tuple=(3,3), hide_axis:bool=True, cmap:str='binary',\n alpha:float=None)->plt.Axes:\n if ax is None: fig,ax = plt.subplots(figsize=figsize)\n ax.imshow(image2np(img.data), cmap=cmap, alpha=alpha)\n if hide_axis: ax.axis('off')\n return ax", "_____no_output_____" ], [ "img = next(PATH_X.iterdir())\nopen_image(img).show()\nopen_image(img).size", "_____no_output_____" ], [ "def get_y_fn(x_fn): return PATH_Y/f'{x_fn.name[:-4]}.png'\n\nimg_y_f = get_y_fn(img)\nopen_mask(img_y_f).show()\nopen_mask(img_y_f).size", "_____no_output_____" ], [ "x = open_image(img)\nx.show(y=open_mask(img_y_f))\nx.shape", "_____no_output_____" ], [ "open_image(img).shape, open_mask(img_y_f).shape", "_____no_output_____" ], [ "def get_datasets(path):\n x_fns = [o for o in path.iterdir() if o.is_file()]\n y_fns = [get_y_fn(o) for o in x_fns]\n mask = [o>=1000 for o in range(len(x_fns))]\n arrs = arrays_split(mask, x_fns, y_fns)\n return [SegmentationDataset(*o) for o in arrs]\n", "_____no_output_____" ], [ "train_ds,valid_ds = 
get_datasets(PATH_X)\ntrain_ds,valid_ds", "_____no_output_____" ], [ "x,y = next(iter(train_ds))\nx.shape, y.shape, type(x), type(y)", "_____no_output_____" ], [ "size = 128", "_____no_output_____" ], [ "def get_tfm_datasets(size):\n datasets = get_datasets(PATH_X)\n tfms = get_transforms(do_flip=True, max_rotate=4, max_lighting=0.2)\n return transform_datasets(train_ds, valid_ds, tfms=tfms, tfm_y=True, size=size, padding_mode='border')", "_____no_output_____" ], [ "train_tds, *_ = get_tfm_datasets(size)", "_____no_output_____" ], [ "for i in range(0,3):\n train_tds[i][0].show()", "_____no_output_____" ], [ "for i in range(0,3):\n train_tds[i][1].show()", "_____no_output_____" ], [ "_,axes = plt.subplots(1,4, figsize=(12,6))\nfor i, ax in enumerate(axes.flat):\n imgx,imgy = train_tds[i]\n imgx.show(ax, y=imgy)", "_____no_output_____" ], [ "default_norm,default_denorm = normalize_funcs( mean=tensor([0.4850, 0.4560, 0.4060]), std=tensor([0.2290, 0.2240, 0.2250]))\nbs = 32", "_____no_output_____" ], [ "def get_data(size, bs):\n return DataBunch.create(*get_tfm_datasets(size), bs=bs, tfms=default_norm)", "_____no_output_____" ], [ "data = get_data(size, bs)", "_____no_output_____" ], [ "#export\ndef show_xy_images(x:Tensor,y:Tensor,rows:int,figsize:tuple=(9,9)):\n \"Shows a selection of images and targets from a given batch.\"\n fig, axs = plt.subplots(rows,rows,figsize=figsize)\n for i, ax in enumerate(axs.flatten()): show_image(x[i], y=y[i], ax=ax)\n plt.tight_layout()", "_____no_output_____" ], [ "x,y = next(iter(data.train_dl))\nx,y = x.cpu(),y.cpu()\nx = default_denorm(x)\nshow_xy_images(x,y,4, figsize=(9,9))\nx.shape, y.shape", "_____no_output_____" ], [ "head = std_upsample_head(2, 512,256,256,256,256)\nhead", "_____no_output_____" ], [ "def dice(input:Tensor, targs:Tensor) -> Rank0Tensor:\n \"Dice coefficient metric for binary target\"\n n = targs.shape[0]\n input = input.argmax(dim=1).view(n,-1)\n targs = targs.view(n,-1)\n intersect = (input*targs).sum().float()\n union = (input+targs).sum().float()\n return 2. 
* intersect / union\n\ndef accuracy(input:Tensor, targs:Tensor) -> Rank0Tensor:\n \"Accuracy\"\n n = targs.shape[0]\n input = input.argmax(dim=1).view(n,-1)\n targs = targs.view(n,-1)\n return (input==targs).float().mean()", "_____no_output_____" ], [ "metrics=[accuracy, dice]", "_____no_output_____" ], [ "learn = ConvLearner(data, models.resnet34, custom_head=head,\n metrics=metrics)", "_____no_output_____" ], [ "lr_find(learn)\nlearn.recorder.plot()", "epoch train loss valid loss accuracy dice\n1 6.040244 \n" ], [ "learn.loss_func", "_____no_output_____" ], [ "lr = 1e-1", "_____no_output_____" ], [ "learn.fit_one_cycle(10, slice(lr))", "epoch train loss valid loss accuracy dice\n" ], [ "# memory footprint support libraries/code\n!ln -sf /opt/bin/nvidia-smi /usr/bin/nvidia-smi\n!pip install gputil\n!pip install psutil\n!pip install humanize\nimport psutil\nimport humanize\nimport os\nimport GPUtil as GPU\nGPUs = GPU.getGPUs()\n# XXX: only one GPU on Colab and isn’t guaranteed\ngpu = GPUs[0]\ndef printm():\n process = psutil.Process(os.getpid())\n print(\"Gen RAM Free: \" + humanize.naturalsize( psutil.virtual_memory().available ), \" | Proc size: \" + humanize.naturalsize( process.memory_info().rss))\n print(\"GPU RAM Free: {0:.0f}MB | Used: {1:.0f}MB | Util {2:3.0f}% | Total {3:.0f}MB\".format(gpu.memoryFree, gpu.memoryUsed, gpu.memoryUtil*100, gpu.memoryTotal))\nprintm()", "Collecting gputil\n Downloading https://files.pythonhosted.org/packages/45/99/837428d26b47ebd6b66d6e1b180e98ec4a557767a93a81a02ea9d6242611/GPUtil-1.3.0.tar.gz\nRequirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from gputil) (1.15.2)\nBuilding wheels for collected packages: gputil\n Running setup.py bdist_wheel for gputil ... \u001b[?25l-\b \bdone\n\u001b[?25h Stored in directory: /root/.cache/pip/wheels/17/0f/04/b79c006972335e35472c0b835ed52bfc0815258d409f560108\nSuccessfully built gputil\nInstalling collected packages: gputil\nSuccessfully installed gputil-1.3.0\nRequirement already satisfied: psutil in /usr/local/lib/python3.6/dist-packages (5.4.7)\nCollecting humanize\n Downloading https://files.pythonhosted.org/packages/8c/e0/e512e4ac6d091fc990bbe13f9e0378f34cf6eecd1c6c268c9e598dcf5bb9/humanize-0.5.1.tar.gz\nBuilding wheels for collected packages: humanize\n Running setup.py bdist_wheel for humanize ... \u001b[?25l-\b \bdone\n\u001b[?25h Stored in directory: /root/.cache/pip/wheels/69/86/6c/f8b8593bc273ec4b0c653d3827f7482bb2001a2781a73b7f44\nSuccessfully built humanize\nInstalling collected packages: humanize\nSuccessfully installed humanize-0.5.1\nGen RAM Free: 11.6 GB | Proc size: 2.1 GB\nGPU RAM Free: 10883MB | Used: 558MB | Util 5% | Total 11441MB\n" ], [ "", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbff6b3b4163d0ecad7d8863c600671f33d3c3f4
425,109
ipynb
Jupyter Notebook
human_part_segmentation_demo.ipynb
mayankgrwl97/human-parser
dd42de6404876496989dec12b106d21872082bcf
[ "MIT" ]
3
2021-02-22T01:50:17.000Z
2022-02-07T09:18:27.000Z
human_part_segmentation_demo.ipynb
mayankgrwl97/human-parser
dd42de6404876496989dec12b106d21872082bcf
[ "MIT" ]
null
null
null
human_part_segmentation_demo.ipynb
mayankgrwl97/human-parser
dd42de6404876496989dec12b106d21872082bcf
[ "MIT" ]
1
2021-12-10T10:55:02.000Z
2021-12-10T10:55:02.000Z
2,624.12963
413,715
0.964411
[ [ [ "<a href=\"https://colab.research.google.com/github/mayankgrwl97/human-part-segmentation/blob/master/human_part_segmentation_demo.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ] ], [ [ "# Install human_part_segmentation package\n!git clone https://github.com/mayankgrwl97/human-part-segmentation.git\n!cd human-part-segmentation && pip install .", "_____no_output_____" ], [ "# Install ninja to load C++ dependencies in torch\n!apt-get install ninja-build", "_____no_output_____" ], [ "from human_part_segmentation import HumanPartSegmentation\n\n# Instantiate pre-trained model trained on 'lip' dataset\nhps = HumanPartSegmentation(dataset='lip', gpu='0')\n\n# Print allowed labels\nprint(hps.labels)", "['Background', 'Hat', 'Hair', 'Glove', 'Sunglasses', 'Upper-clothes', 'Dress', 'Coat', 'Socks', 'Pants', 'Jumpsuits', 'Scarf', 'Skirt', 'Face', 'Left-arm', 'Right-arm', 'Left-leg', 'Right-leg', 'Left-shoe', 'Right-shoe']\n" ], [ "from IPython import display\n\n# Demo image\nimg_path = '/content/human-part-segmentation/demo/demo.jpg'\n\n# Visualize demo image\ndisplay.Image(img_path)", "_____no_output_____" ], [ "from PIL import Image\n\n# Get unified segmentation mask for 'Face' and 'Hair'\nface_hair_mask = hps.get_part_mask(img_path, part_labels=['Face', 'Hair'])\n\n# Visualize predicted mask\ndisplay.display(Image.fromarray(face_hair_mask))", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code" ] ]
cbff712c010773c77c419b451406a7bf708d6625
194,299
ipynb
Jupyter Notebook
Titanic.ipynb
sarahfang9292/junejun.github.io
1f8aa52f5310e47cb4c342e782b63b995e31bf00
[ "MIT" ]
null
null
null
Titanic.ipynb
sarahfang9292/junejun.github.io
1f8aa52f5310e47cb4c342e782b63b995e31bf00
[ "MIT" ]
null
null
null
Titanic.ipynb
sarahfang9292/junejun.github.io
1f8aa52f5310e47cb4c342e782b63b995e31bf00
[ "MIT" ]
null
null
null
50.401816
26,172
0.431804
[ [ [ "import pandas as pd #数据分析\nimport numpy as np #科学计算\nfrom pandas import Series,DataFrame\n\ndata_train = pd.read_csv(\"/Users/zhijun/Desktop/Titanic/all/train.csv\")\ndata_train.columns", "_____no_output_____" ], [ "data_train.info()", "<class 'pandas.core.frame.DataFrame'>\nRangeIndex: 891 entries, 0 to 890\nData columns (total 12 columns):\nPassengerId 891 non-null int64\nSurvived 891 non-null int64\nPclass 891 non-null int64\nName 891 non-null object\nSex 891 non-null object\nAge 714 non-null float64\nSibSp 891 non-null int64\nParch 891 non-null int64\nTicket 891 non-null object\nFare 891 non-null float64\nCabin 204 non-null object\nEmbarked 889 non-null object\ndtypes: float64(2), int64(5), object(5)\nmemory usage: 83.6+ KB\n" ], [ "data_train.describe()", "_____no_output_____" ], [ "import matplotlib.pyplot as plt\nfig = plt.figure()\nfig.set(alpha=0.2) # 设定图表颜色alpha参数\n\nplt.subplot2grid((4,6),(0,0))\ndata_train.Survived.value_counts().plot(kind='pie')\nplt.title('Number of Survival')\nplt.ylabel('Number of people')\n\nplt.subplot2grid((4,6),(0,2))\ndata_train.Pclass.value_counts().plot(kind='bar')\nplt.title('Class Distribution')\nplt.ylabel('Number of people')\n\nplt.subplot2grid((4,6),(0,4))\nplt.scatter(data_train.Survived,data_train.Age)\nplt.ylabel('Age')\nplt.grid(b=True, which='major', axis='y')# 显示网格线\nplt.title('Survival in Age')\n\nplt.subplot2grid((4,6),(2,0),colspan=2)\ndata_train.Age[data_train.Pclass==1].plot(kind='kde')\ndata_train.Age[data_train.Pclass==2].plot(kind='kde')\ndata_train.Age[data_train.Pclass==3].plot(kind='kde')\nplt.xlabel('Age')\nplt.ylabel('Density')\nplt.title('Age Density in Classes')\nplt.legend(('First', 'Second','Third'),loc='best')\n\nplt.subplot2grid((4,6),(2,4))\ndata_train.Embarked.value_counts().plot(kind='bar')\nplt.title('Number of People from Docks')\nplt.ylabel('Nubmber of People') \nplt.show()", "_____no_output_____" ], [ "\nfig = plt.figure()\n\nfig1=fig.add_subplot(141)\ndata_train.Survived[data_train.Pclass !=3][data_train.Sex=='female'].value_counts().plot(kind='bar',label='Female in High Cabin',color='green')\nplt.ylabel('Number of People')\nplt.title('Female in High Cabin')\n\nfig2=fig.add_subplot(142,sharey=fig1)\ndata_train.Survived[data_train.Pclass==3][data_train.Sex=='female'].value_counts().plot(kind='bar',label='Female in Low Cabin',color='red')\nplt.ylabel('Number of People')\nplt.title('Female in Low Cabin')\n\nfig3=fig.add_subplot(143,sharey=fig1)\ndata_train.Survived[data_train.Pclass != 3][data_train.Sex=='male'].value_counts().plot(kind='bar',color='blue')\nplt.ylabel('Number of People')\nplt.title('Male in High Cabin')\n\nfig4=fig.add_subplot(144,sharey=fig1)\ndata_train.Survived[data_train.Pclass==3][data_train.Sex=='male'].value_counts().plot(kind='bar',color='orange')\nplt.ylabel('Number of People')\nplt.title('Male in Low Cabin')", "_____no_output_____" ], [ "new=data_train.groupby(['SibSp','Survived'])\nf=pd.DataFrame(new.count()['PassengerId'])\nf", "_____no_output_____" ], [ "p=data_train.groupby(['Parch','Survived'])\nf=pd.DataFrame(p.count()['PassengerId'])\nf", "_____no_output_____" ], [ "from sklearn.ensemble import RandomForestRegressor# 使用 RandomForestClassifier 填补缺失的年龄属性", "_____no_output_____" ], [ "def set_missing_ages(df):\n \n # 把已有的数值型特征取出来丢进Random Forest Regressor中\n age_df = df[['Age','Fare', 'Parch', 'SibSp', 'Pclass']]\n\n # 乘客分成已知年龄和未知年龄两部分\n known_age = age_df[age_df.Age.notnull()].as_matrix()\n unknown_age = age_df[age_df.Age.isnull()].as_matrix()\n\n # y即目标年龄\n y = known_age[:, 
0]\n\n # X即特征属性值\n X = known_age[:, 1:]\n\n # fit到RandomForestRegressor之中\n rfr = RandomForestRegressor(random_state=0, n_estimators=2000, n_jobs=-1)\n rfr.fit(X, y)\n \n # 用得到的模型进行未知年龄结果预测\n predictedAges = rfr.predict(unknown_age[:, 1::])\n \n # 用得到的预测结果填补原缺失数据\n df.loc[ (df.Age.isnull()), 'Age' ] = predictedAges \n \n return df, rfr\n\ndef set_Cabin_type(df):\n df.loc[ (df.Cabin.notnull()), 'Cabin' ] = \"Yes\"\n df.loc[ (df.Cabin.isnull()), 'Cabin' ] = \"No\"\n return df\n\ndata_train, rfr = set_missing_ages(data_train)\ndata_train = set_Cabin_type(data_train)\ndata_train", "_____no_output_____" ], [ "def set_Cabin_type(df):\n df.loc[ (df.Cabin.notnull()), 'Cabin' ] = \"Yes\"\n df.loc[ (df.Cabin.isnull()), 'Cabin' ] = \"No\"\n return df\ndummy_cabin=pd.get_dummies(data_train['Cabin'],prefix='Cabin')\ndummy_Embarked=pd.get_dummies(data_train['Embarked'],prefix='Embarked')\ndummy_Sex = pd.get_dummies(data_train['Sex'], prefix= 'Sex')\ndummy_Pclass = pd.get_dummies(data_train['Pclass'], prefix= 'Pclass')\ndf=pd.concat([data_train,dummy_cabin,dummy_Embarked,dummy_Sex,dummy_Pclass],axis=1)\ndf.drop(['Pclass', 'Name', 'Sex', 'Ticket', 'Cabin', 'Embarked'],axis=1,inplace=True)\ndf", "_____no_output_____" ], [ "import sklearn.preprocessing as preprocessing\nscaler = preprocessing.StandardScaler()\nage_scale_param = scaler.fit(df['Age']).reshape(-1, 1)\ndf['Age_scaled'] = scaler.fit_transform(df['Age'], age_scale_param)\nfare_scale_param = scaler.fit(df['Fare']).reshape(-1, 1)\ndf['Fare_scaled'] = scaler.fit_transform(df['Fare'], fare_scale_param)\ndf", "_____no_output_____" ], [ "from sklearn import linear_model\ntrain_df=df.filter(regex='Survived|Age_.*|SibSp|Parch|Fare_.*|Cabin_.*|Embarked_.*|Sex_.*|Pclass_.*')\ntrain_np=train_df.as_matrix()\ny = train_np[:, 0]# y即Survival结果\nX = train_np[:, 1:]# X即特征属性值\nclf = linear_model.LogisticRegression(C=1.0, penalty='l1', tol=1e-6)\nclf.fit(X, y)\n \nclf", "_____no_output_____" ], [ "X.shape", "_____no_output_____" ], [ "data_test = pd.read_csv(\"/Users/zhijun/Desktop/Titanic/all/test.csv\")\ndata_test.loc[ (data_test.Fare.isnull()), 'Fare' ] = 0\n# 接着我们对test_data做和train_data中一致的特征变换\n# 首先用同样的RandomForestRegressor模型填上丢失的年龄\ntmp_df = data_test[['Age','Fare', 'Parch', 'SibSp', 'Pclass']]\nnull_age = tmp_df[data_test.Age.isnull()].as_matrix()\n# 根据特征属性X预测年龄并补上\nX = null_age[:, 1:]\npredictedAges = rfr.predict(X)\ndata_test.loc[ (data_test.Age.isnull()), 'Age' ] = predictedAges\n\ndata_test = set_Cabin_type(data_test)\ndummies_Cabin = pd.get_dummies(data_test['Cabin'], prefix= 'Cabin')\ndummies_Embarked = pd.get_dummies(data_test['Embarked'], prefix= 'Embarked')\ndummies_Sex = pd.get_dummies(data_test['Sex'], prefix= 'Sex')\ndummies_Pclass = pd.get_dummies(data_test['Pclass'], prefix= 'Pclass')\n\n\ndf_test = pd.concat([data_test, dummies_Cabin, dummies_Embarked, dummies_Sex, dummies_Pclass], axis=1)\ndf_test.drop(['Pclass', 'Name', 'Sex', 'Ticket', 'Cabin', 'Embarked'], axis=1, inplace=True)\ndf_test\nX.shape", "_____no_output_____" ], [ "test = df_test.filter(regex='Age_.*|SibSp|Parch|Fare_.*|Cabin_.*|Embarked_.*|Sex_.*|Pclass_.*')\npredictions = clf.predict(test)\nresult = pd.DataFrame({'PassengerId':data_test['PassengerId'].as_matrix(), 'Survived':predictions.astype(np.int32)})\nresult.to_csv(\"logistic_regression_predictions.csv\", index=False)", "_____no_output_____" ], [ "pd.read_csv(\"logistic_regression_predictions.csv\")", "_____no_output_____" ], [ "import numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.learning_curve import 
learning_curve\n\n# 用sklearn的learning_curve得到training_score和cv_score,使用matplotlib画出learning curve\ndef plot_learning_curve(estimator, title, X, y, ylim=None, cv=None, n_jobs=1, \n train_sizes=np.linspace(.05, 1., 20), verbose=0, plot=True):\n \"\"\"\n 画出data在某模型上的learning curve.\n 参数解释\n ----------\n estimator : 你用的分类器。\n title : 表格的标题。\n X : 输入的feature,numpy类型\n y : 输入的target vector\n ylim : tuple格式的(ymin, ymax), 设定图像中纵坐标的最低点和最高点\n cv : 做cross-validation的时候,数据分成的份数,其中一份作为cv集,其余n-1份作为training(默认为3份)\n n_jobs : 并行的的任务数(默认1)\n \"\"\"\n train_sizes, train_scores, test_scores = learning_curve(\n estimator, X, y, cv=cv, n_jobs=n_jobs, train_sizes=train_sizes, verbose=verbose)\n \n train_scores_mean = np.mean(train_scores, axis=1)\n train_scores_std = np.std(train_scores, axis=1)\n test_scores_mean = np.mean(test_scores, axis=1)\n test_scores_std = np.std(test_scores, axis=1)\n \n if plot:\n plt.figure()\n plt.title(title)\n if ylim is not None:\n plt.ylim(*ylim)\n plt.xlabel(u\"训练样本数\")\n plt.ylabel(u\"得分\")\n plt.gca().invert_yaxis()\n plt.grid()\n \n plt.fill_between(train_sizes, train_scores_mean - train_scores_std, train_scores_mean + train_scores_std, \n alpha=0.1, color=\"b\")\n plt.fill_between(train_sizes, test_scores_mean - test_scores_std, test_scores_mean + test_scores_std, \n alpha=0.1, color=\"r\")\n plt.plot(train_sizes, train_scores_mean, 'o-', color=\"b\", label=u\"训练集上得分\")\n plt.plot(train_sizes, test_scores_mean, 'o-', color=\"r\", label=u\"交叉验证集上得分\")\n \n plt.legend(loc=\"best\")\n \n plt.draw()\n plt.gca().invert_yaxis()\n plt.show()\n \n midpoint = ((train_scores_mean[-1] + train_scores_std[-1]) + (test_scores_mean[-1] - test_scores_std[-1])) / 2\n diff = (train_scores_mean[-1] + train_scores_std[-1]) - (test_scores_mean[-1] - test_scores_std[-1])\n return midpoint, diff\nX.shape\n#plot_learning_curve(clf, u\"学习曲线\", X, y)", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbff7eff01140bbdcb9d422be8befc29b9c85b01
118,732
ipynb
Jupyter Notebook
quantopian/lectures/Spearman_Rank_Correlation/notebook.ipynb
LinuxIsCool/binder
0a59cb7f087e6c1418c9a079341e27f4d69bb448
[ "Apache-2.0" ]
1
2021-10-10T20:00:46.000Z
2021-10-10T20:00:46.000Z
quantopian/lectures/Spearman_Rank_Correlation/notebook.ipynb
LinuxIsCool/binder
0a59cb7f087e6c1418c9a079341e27f4d69bb448
[ "Apache-2.0" ]
null
null
null
quantopian/lectures/Spearman_Rank_Correlation/notebook.ipynb
LinuxIsCool/binder
0a59cb7f087e6c1418c9a079341e27f4d69bb448
[ "Apache-2.0" ]
1
2021-08-05T15:14:04.000Z
2021-08-05T15:14:04.000Z
210.51773
28,320
0.891377
[ [ [ "# Measuring Monotonic Relationships\nBy Evgenia \"Jenny\" Nitishinskaya and Delaney Granizo-Mackenzie with example algorithms by David Edwards\n\nReference: DeFusco, Richard A. \"Tests Concerning Correlation: The Spearman Rank Correlation Coefficient.\" Quantitative Investment Analysis. Hoboken, NJ: Wiley, 2007\n\n\nPart of the Quantopian Lecture Series:\n\n* [www.quantopian.com/lectures](https://www.quantopian.com/lectures)\n* [github.com/quantopian/research_public](https://github.com/quantopian/research_public)\n\n\n---\n\nThe Spearman Rank Correlation Coefficient allows us to determine whether or not two data series move together; that is, when one increases (decreases) the other also increases (decreases). This is more general than a linear relationship; for instance, $y = e^x$ is a monotonic function, but not a linear one. Therefore, in computing it we compare not the raw data but the ranks of the data.\n\nThis is useful when your data sets may be in different units, and therefore not linearly related (for example, the price of a square plot of land and its side length, since the price is more likely to be linear in the area). It's also suitable for data sets which not satisfy the assumptions that other tests require, such as the observations being normally distributed as would be necessary for a t-test.", "_____no_output_____" ] ], [ [ "import numpy as np\nimport scipy.stats as stats\nimport matplotlib.pyplot as plt\nimport math", "_____no_output_____" ], [ "# Example of ranking data\nl = [10, 9, 5, 7, 5]\nprint 'Raw data: ', l\nprint 'Ranking: ', list(stats.rankdata(l, method='average'))", "Raw data: [10, 9, 5, 7, 5]\nRanking: [5.0, 4.0, 1.5, 3.0, 1.5]\n" ] ], [ [ "## Spearman Rank Correlation\n\n### Intuition\n\nThe intution is now that instead of looking at the relationship between the two variables, we look at the relationship between the ranks. This is robust to outliers and the scale of the data.\n\n### Definition\n\nThe argument `method='average'` indicates that when we have a tie, we average the ranks that the numbers would occupy. For example, the two 5's above, which would take up ranks 1 and 2, each get assigned a rank of $1.5$.\n\nTo compute the Spearman rank correlation for two data sets $X$ and $Y$, each of size $n$, we use the formula\n$$r_S = 1 - \\frac{6 \\sum_{i=1}^n d_i^2}{n(n^2 - 1)}$$\n\nwhere $d_i$ is the difference between the ranks of the $i$th pair of observations, $X_i - Y_i$.\n\nThe result will always be between $-1$ and $1$. A positive value indicates a positive relationship between the variables, while a negative value indicates an inverse relationship. A value of 0 implies the absense of any monotonic relationship. This does not mean that there is no relationship; for instance, if $Y$ is equal to $X$ with a delay of 2, they are related simply and precisely, but their $r_S$ can be close to zero:", "_____no_output_____" ], [ "##Experiment\n\nLet's see what happens if we draw $X$ from a poisson distribution (non-normal), and then set $Y = e^X + \\epsilon$ where $\\epsilon$ is drawn from another poisson distribution. We'll take the spearman rank and the correlation coefficient on this data and then run the entire experiment many times. Because $e^X$ produces many values that are far away from the rest, we can this of this as modeling 'outliers' in our data. Spearman rank compresses the outliers and does better at measuring correlation. 
Normal correlation is confused by the outliers and on average will measure less of a relationship than is actually there.", "_____no_output_____" ] ], [ [ "## Let's see an example of this\nn = 100\n\ndef compare_correlation_and_spearman_rank(n, noise):\n X = np.random.poisson(size=n)\n Y = np.exp(X) + noise * np.random.normal(size=n)\n\n Xrank = stats.rankdata(X, method='average')\n # n-2 is the second to last element\n Yrank = stats.rankdata(Y, method='average')\n\n diffs = Xrank - Yrank # order doesn't matter since we'll be squaring these values\n r_s = 1 - 6*sum(diffs*diffs)/(n*(n**2 - 1))\n c_c = np.corrcoef(X, Y)[0,1]\n \n return r_s, c_c\n\nexperiments = 1000\nspearman_dist = np.ndarray(experiments)\ncorrelation_dist = np.ndarray(experiments)\nfor i in range(experiments):\n r_s, c_c = compare_correlation_and_spearman_rank(n, 1.0)\n spearman_dist[i] = r_s\n correlation_dist[i] = c_c\n \nprint 'Spearman Rank Coefficient: ' + str(np.mean(spearman_dist))\n# Compare to the regular correlation coefficient\nprint 'Correlation coefficient: ' + str(np.mean(correlation_dist))", "Spearman Rank Coefficient: 0.877239879988\nCorrelation coefficient: 0.773013362941\n" ] ], [ [ "Let's take a look at the distribution of measured correlation coefficients and compare the spearman with the regular metric.", "_____no_output_____" ] ], [ [ "plt.hist(spearman_dist, bins=50, alpha=0.5)\nplt.hist(correlation_dist, bins=50, alpha=0.5)\nplt.legend(['Spearman Rank', 'Regular Correlation'])\nplt.xlabel('Correlation Coefficient')\nplt.ylabel('Frequency');", "_____no_output_____" ] ], [ [ "Now let's see how the Spearman rank and Regular coefficients cope when we add more noise to the situation.", "_____no_output_____" ] ], [ [ "n = 100\nnoises = np.linspace(0, 3, 30)\nexperiments = 100\nspearman = np.ndarray(len(noises))\ncorrelation = np.ndarray(len(noises))\n\nfor i in range(len(noises)):\n # Run many experiments for each noise setting\n rank_coef = 0.0\n corr_coef = 0.0\n noise = noises[i]\n for j in range(experiments):\n r_s, c_c = compare_correlation_and_spearman_rank(n, noise)\n rank_coef += r_s\n corr_coef += c_c\n spearman[i] = rank_coef/experiments\n correlation[i] = corr_coef/experiments\n \nplt.scatter(noises, spearman, color='r')\nplt.scatter(noises, correlation)\nplt.legend(['Spearman Rank', 'Regular Correlation'])\nplt.xlabel('Amount of Noise')\nplt.ylabel('Average Correlation Coefficient')", "_____no_output_____" ] ], [ [ "We can see that the Spearman rank correlation copes with the non-linear relationship much better at most levels of noise. Interestingly, at very high levels, it seems to do worse than regular correlation.", "_____no_output_____" ], [ "##Delay in correlation\n\nOf you might have the case that one process affects another, but after a time lag. Now let's see what happens if we add the delay.", "_____no_output_____" ] ], [ [ "n = 100\n\nX = np.random.rand(n)\nXrank = stats.rankdata(X, method='average')\n# n-2 is the second to last element\nYrank = stats.rankdata([1,1] + list(X[:(n-2)]), method='average')\n\ndiffs = Xrank - Yrank # order doesn't matter since we'll be squaring these values\nr_s = 1 - 6*sum(diffs*diffs)/(n*(n**2 - 1))\nprint r_s", "-0.00584158415842\n" ] ], [ [ "Sure enough, the relationship is not detected. 
It is important when using both regular and spearman correlation to check for lagged relationships by offsetting your data and testing for different offset values.", "_____no_output_____" ], [ "##Built-In Function\n\nWe can also use the `spearmanr` function in the `scipy.stats` library:", "_____no_output_____" ] ], [ [ "# Generate two random data sets\nnp.random.seed(161)\nX = np.random.rand(10)\nY = np.random.rand(10)\n\nr_s = stats.spearmanr(X, Y)\nprint 'Spearman Rank Coefficient: ', r_s[0]\nprint 'p-value: ', r_s[1]", "Spearman Rank Coefficient: 0.236363636364\np-value: 0.510885317515\n" ] ], [ [ "We now have ourselves an $r_S$, but how do we interpret it? It's positive, so we know that the variables are not anticorrelated. It's not very large, so we know they aren't perfectly positively correlated, but it's hard to say from a glance just how significant the correlation is. Luckily, `spearmanr` also computes the p-value for this coefficient and sample size for us. We can see that the p-value here is above 0.05; therefore, we cannot claim that $X$ and $Y$ are correlated.\n\n##Real World Example: Mutual Fund Expense Ratio\n\nNow that we've seen how Spearman rank correlation works, we'll quickly go through the process again with some real data. For instance, we may wonder whether the expense ratio of a mutual fund is indicative of its three-year Sharpe ratio. That is, does spending more money on administration, management, etc. lower the risk or increase the returns? Quantopian does not currently support mutual funds, so we will pull the data from Yahoo Finance. Our p-value cutoff will be the usual default of 0.05.\n\n### Data Source\n\nThanks to [Matthew Madurski](https://github.com/dursk) for the data. To obtain the same data:\n\n1. Download the csv from this link. https://gist.github.com/dursk/82eee65b7d1056b469ab\n2. Upload it to the 'data' folder in your research account.", "_____no_output_____" ] ], [ [ "mutual_fund_data = local_csv('mutual_fund_data.csv')\nexpense = mutual_fund_data['Annual Expense Ratio'].values\nsharpe = mutual_fund_data['Three Year Sharpe Ratio'].values\n\nplt.scatter(expense, sharpe)\nplt.xlabel('Expense Ratio')\nplt.ylabel('Sharpe Ratio')\n\nr_S = stats.spearmanr(expense, sharpe)\nprint 'Spearman Rank Coefficient: ', r_S[0]\nprint 'p-value: ', r_S[1]", "Spearman Rank Coefficient: -0.237573932355\np-value: 0.0167465097116\n" ] ], [ [ "Our p-value is below the cutoff, which means we accept the hypothesis that the two are correlated. The negative coefficient indicates that there is a negative correlation, and that more expensive mutual funds have worse sharpe ratios. However, there is some weird clustering in the data, it seems there are expensive groups with low sharpe ratios, and a main group whose sharpe ratio is unrelated to the expense. Further analysis would be required to understand what's going on here.", "_____no_output_____" ], [ "## Real World Use Case: Evaluating a Ranking Model\n\n### NOTE: [Factor Analysis](https://www.quantopian.com/lectures/factor-analysis) now covers this topic in much greater detail\n\nLet's say that we have some way of ranking securities and that we'd like to test how well our ranking performs in practice. In this case our model just takes the mean daily return for the last month and ranks the stocks by that metric. \n\nWe hypothesize that this will be predictive of the mean returns over the next month. 
To test this we score the stocks based on a lookback window, then take the spearman rank correlation of the score and the mean returns over the walk forward month.", "_____no_output_____" ] ], [ [ "symbol_list = ['A', 'AA', 'AAC', 'AAL', 'AAMC', 'AAME', 'AAN', 'AAOI', 'AAON', 'AAP', 'AAPL', 'AAT', 'AAU', 'AAV', 'AAVL', 'AAWW', 'AB', 'ABAC', 'ABAX', 'ABB', 'ABBV', 'ABC', 'ABCB', 'ABCD', 'ABCO', 'ABCW', 'ABDC', 'ABEV', 'ABG', 'ABGB']\n\n# Get the returns over the lookback window\nstart = '2014-12-01'\nend = '2015-01-01'\nhistorical_returns = get_pricing(symbol_list, fields='price', start_date=start, end_date=end).pct_change()[1:]\n\n# Compute our stock score\nscores = np.mean(historical_returns)\nprint 'Our Scores\\n'\nprint scores\nprint '\\n'\n\nstart = '2015-01-01'\nend = '2015-02-01'\nwalk_forward_returns = get_pricing(symbol_list, fields='price', start_date=start, end_date=end).pct_change()[1:]\nwalk_forward_returns = np.mean(walk_forward_returns)\nprint 'The Walk Forward Returns\\n'\nprint walk_forward_returns\nprint '\\n'\n\nplt.scatter(scores, walk_forward_returns)\nplt.xlabel('Scores')\nplt.ylabel('Walk Forward Returns')\n\nr_s = stats.spearmanr(scores, walk_forward_returns)\nprint 'Correlation Coefficient: ' + str(r_s[0])\nprint 'p-value: ' + str(r_s[1])", "Our Scores\n\nEquity(24757 [A]) -0.000624\nEquity(2 [AA]) -0.003646\nEquity(47842 [AAC]) 0.002745\nEquity(45971 [AAL]) 0.005677\nEquity(45415 [AAMC]) -0.020459\nEquity(21 [AAME]) 0.002207\nEquity(523 [AAN]) 0.003765\nEquity(45503 [AAOI]) 0.004758\nEquity(9681 [AAON]) 0.004228\nEquity(23175 [AAP]) 0.003887\nEquity(24 [AAPL]) -0.001870\nEquity(40707 [AAT]) 0.001433\nEquity(27922 [AAU]) -0.000576\nEquity(27884 [AAV]) 0.003966\nEquity(47422 [AAVL]) 0.017364\nEquity(28378 [AAWW]) 0.004993\nEquity(66 [AB]) -0.000934\nEquity(39927 [ABAC]) -0.005923\nEquity(31 [ABAX]) 0.000679\nEquity(22574 [ABB]) -0.002108\nEquity(43694 [ABBV]) -0.002467\nEquity(22954 [ABC]) -0.000335\nEquity(11232 [ABCB]) 0.002142\nEquity(39052 [ABCD]) -0.000333\nEquity(23176 [ABCO]) 0.008131\nEquity(47935 [ABCW]) 0.002776\nEquity(46877 [ABDC]) -0.000825\nEquity(45840 [ABEV]) 0.000035\nEquity(24761 [ABG]) 0.000830\nEquity(45676 [ABGB]) -0.003712\ndtype: float64\n\n\nThe Walk Forward Returns\n\nEquity(24757 [A]) -0.003616\nEquity(2 [AA]) -0.000464\nEquity(47842 [AAC]) -0.008585\nEquity(45971 [AAL]) -0.004467\nEquity(45415 [AAMC]) -0.030225\nEquity(21 [AAME]) -0.000321\nEquity(523 [AAN]) 0.001977\nEquity(45503 [AAOI]) -0.009537\nEquity(9681 [AAON]) -0.000094\nEquity(23175 [AAP]) 0.000246\nEquity(24 [AAPL]) 0.003940\nEquity(40707 [AAT]) 0.005084\nEquity(27922 [AAU]) 0.012719\nEquity(27884 [AAV]) -0.005286\nEquity(47422 [AAVL]) -0.018883\nEquity(28378 [AAWW]) -0.003406\nEquity(66 [AB]) -0.003890\nEquity(39927 [ABAC]) -0.007934\nEquity(31 [ABAX]) 0.003403\nEquity(22574 [ABB]) -0.004947\nEquity(43694 [ABBV]) -0.004363\nEquity(22954 [ABC]) 0.002655\nEquity(11232 [ABCB]) -0.001951\nEquity(39052 [ABCD]) 0.019828\nEquity(23176 [ABCO]) -0.001423\nEquity(47935 [ABCW]) -0.002212\nEquity(46877 [ABDC]) 0.001030\nEquity(45840 [ABEV]) 0.005386\nEquity(24761 [ABG]) -0.001088\nEquity(45676 [ABGB]) 0.013701\ndtype: float64\n\n\nCorrelation Coefficient: -0.159955506118\np-value: 0.398478813425\n" ] ], [ [ "The p-value indicates that our hypothesis is false and we accept the null hypothesis that our ranking was no better than random. 
This is a really good check of any ranking system one devises for constructing a long-short equity portfolio.", "_____no_output_____" ], [ "*This presentation is for informational purposes only and does not constitute an offer to sell, a solicitation to buy, or a recommendation for any security; nor does it constitute an offer to provide investment advisory or other services by Quantopian, Inc. (\"Quantopian\"). Nothing contained herein constitutes investment advice or offers any opinion with respect to the suitability of any security, and any views expressed herein should not be taken as advice to buy, sell, or hold any security or as an endorsement of any security or company. In preparing the information contained herein, Quantopian, Inc. has not taken into account the investment needs, objectives, and financial circumstances of any particular investor. Any views expressed and data illustrated herein were prepared based upon information, believed to be reliable, available to Quantopian, Inc. at the time of publication. Quantopian makes no guarantees as to their accuracy or completeness. All information is subject to change and may quickly become unreliable for various reasons, including changes in market conditions or economic circumstances.*", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ] ]
cbff9782f635dfa7e0330399610f9be374b8c899
1,175
ipynb
Jupyter Notebook
2020 Осенний семестр/Практическое задание 1/Олгашов_Задание 1.ipynb
mosalov/Notebook_For_AI_Main
a693d29bf0bdcf824cb4f1eca86ff54b67ba7428
[ "MIT" ]
6
2021-09-20T10:28:18.000Z
2022-03-14T18:39:17.000Z
2020 Осенний семестр/Практическое задание 1/Олгашов_Задание 1.ipynb
mosalov/Notebook_For_AI_Main
a693d29bf0bdcf824cb4f1eca86ff54b67ba7428
[ "MIT" ]
122
2020-09-07T11:57:57.000Z
2022-03-22T06:47:03.000Z
2020 Осенний семестр/Практическое задание 1/Олгашов_Задание 1.ipynb
mosalov/Notebook_For_AI_Main
a693d29bf0bdcf824cb4f1eca86ff54b67ba7428
[ "MIT" ]
97
2020-09-07T11:32:19.000Z
2022-03-31T10:27:38.000Z
17.279412
40
0.476596
[ [ [ "import numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport sklearn as sk\nx = 'Роман'\ny = 'Олгашов'\nz = x +' ' + y\nprint(z)\nlen(z)", "Роман Олгашов\n" ] ] ]
[ "code" ]
[ [ "code" ] ]
cbff998b1907e130d22896e8c1a120af43ccb055
39,263
ipynb
Jupyter Notebook
week5/word2vec Part I (Completed).ipynb
Emmayyyyy/dso-560-nlp-and-text-analytics
76bde7d0ed7e760b5de455251a523e92a10116fd
[ "MIT" ]
null
null
null
week5/word2vec Part I (Completed).ipynb
Emmayyyyy/dso-560-nlp-and-text-analytics
76bde7d0ed7e760b5de455251a523e92a10116fd
[ "MIT" ]
null
null
null
week5/word2vec Part I (Completed).ipynb
Emmayyyyy/dso-560-nlp-and-text-analytics
76bde7d0ed7e760b5de455251a523e92a10116fd
[ "MIT" ]
null
null
null
31.740501
217
0.466903
[ [ [ "# Spacy", "_____no_output_____" ], [ "### Models\n\nSpacy comes with a variety of different models that can used per language. For instance, the models for English are available [here](https://spacy.io/models/en). You'll need to download each model separately:\n\n```python\npython3 -m spacy download en_core_web_sm\npython3 -m spacy download en_core_web_md\n\n```", "_____no_output_____" ], [ "## Pattern Matching Using Spacy", "_____no_output_____" ], [ "The below code and example is from Ashiq KS's article [Rule-Based Matching with spacy](https://medium.com/@ashiqgiga07/rule-based-matching-with-spacy-295b76ca2b68):", "_____no_output_____" ] ], [ [ "#The input text string is converted to a Document object\ntext = '''\nComputer programming is the process of writing instructions that get executed by computers. \nThe instructions, also known as code, are written in a programming language which the computer \ncan understand and use to perform a task or solve a problem. Basic computer programming involves \nthe analysis of a problem and development of a logical sequence of instructions to solve it. \nThere can be numerous paths to a solution and the computer programmer seeks to design and \ncode that which is most efficient. Among the programmer’s tasks are understanding requirements, \ndetermining the right programming language to use, designing or architecting the solution, coding, \ntesting, debugging and writing documentation so that the solution can be easily\nunderstood by other programmers.Computer programming is at the heart of computer science. It is the \nimplementation portion of software development, application development \nand software engineering efforts, transforming ideas and theories into actual, working solutions.\n'''", "_____no_output_____" ], [ "from spacy.matcher import Matcher #import Matcher class from spacy\n#import the Span class to extract the words from the document object\nfrom spacy.tokens import Span \n\n#Language class with the English model 'en_core_web_sm' is loaded\nnlp = spacy.load(\"en_core_web_sm\")\n\ndoc = nlp(text) # convert the string above to a document\n\n#instantiate a new Matcher class object \nmatcher = Matcher(nlp.vocab)", "_____no_output_____" ] ], [ [ "### Define the Target Pattern", "_____no_output_____" ], [ "The `pattern` object that you define should be a list of dictionary elements, each dictionary describing the token to match for. \n\nHere, we ", "_____no_output_____" ] ], [ [ "#define the pattern\npattern = [{'LOWER': 'computer', 'POS': 'NOUN'},\n {'POS':{'NOT_IN': ['VERB']}}]\n", "_____no_output_____" ] ], [ [ "### Load the Pattern into the Matcher", "_____no_output_____" ] ], [ [ "#add the pattern to the previously created matcher object\nmatcher.add(\"Matching\", None, pattern)", "_____no_output_____" ] ], [ [ "## Using Regular Expressions in Spacy", "_____no_output_____" ], [ "The below example can be found at https://spacy.io/usage/rule-based-matching. It uses the `re.finditer()` function to\nquickly iterate through all the matches found. ", "_____no_output_____" ] ], [ [ "import spacy\nimport re\nnlp = spacy.load(\"en_core_web_sm\")\ndoc = nlp(\"The United States of America (USA) are commonly known as the United States (U.S. or US) or America.\")\n\nexpression = r\"[Uu](nited|\\.?) 
?[Ss](tates|\\.?)\"\nfor match in re.finditer(expression, doc.text):\n start, end = match.span()\n span = doc.char_span(start, end)\n # This is a Span object or None if match doesn't map to valid token sequence\n if span is not None:\n print(\"Found match:\", span.text)", "Found match: United States\nFound match: United States\nFound match: U.S.\nFound match: US\n" ] ], [ [ "## Part of Speech Tagging", "_____no_output_____" ] ], [ [ "!python3 -m spacy download en_core_web_sm\n!python3 -m spacy download en_core_web_md", "_____no_output_____" ], [ "import en_core_web_sm\nimport spacy\nfrom scipy.spatial.distance import cosine\nimport spacy\n\nnlp = spacy.load('en_core_web_md')", "_____no_output_____" ], [ "import pandas as pd\nrows = []\ndoc = nlp(u\"Steve Jobs and Apple is looking at buying U.K. startup for $1 billion\")\nfor token in doc:\n rows.append((token.text, token.lemma_, token.pos_, token.tag_, token.dep_,\n token.shape_, token.is_alpha, token.is_stop))\n \ndata = pd.DataFrame(rows, columns=[\"text\", \"lemma\", \"part_of_speech\", \"tag\", \"dependency\", \"shape\", \"is_alphanumeric\", \"is_stopword\"])\ndata.head()", "_____no_output_____" ] ], [ [ "### Named Entity Recognition", "_____no_output_____" ] ], [ [ "doc = nlp(u\"Steve Jobs and Apple is looking at buying U.K. startup for $1 billion\")\nimport en_core_web_sm\nimport spacy\nfrom scipy.spatial.distance import cosine\nnlp = en_core_web_sm.load()\n\nfor ent in doc.ents:\n print(ent.text, ent.start_char, ent.end_char, ent.label_)", "Steve Jobs 0 10 PERSON\nApple 15 20 ORG\nU.K. 42 46 GPE\n$1 billion 59 69 MONEY\n" ], [ "# visualize this using displacy:\nfrom spacy import displacy\ndisplacy.render(doc, style=\"ent\", jupyter=True)", "_____no_output_____" ] ], [ [ "# Word Embeddings (word2vec Introduction)", "_____no_output_____" ], [ "## Continuous Bag of Words (Use Context to Predict Target Word)\n![alt text](images/word2vec_cbow.png \"Logo Title Text 1\")", "_____no_output_____" ], [ "## Softmax\n![alt text](images/softmax.png \"Logo Title Text 1\")", "_____no_output_____" ], [ "## Skipgram\n![alt text](images/skipgram.png \"Logo Title Text 1\")\n\n## Softmax\n![alt text](images/wordembedding_cluster.png \"Logo Title Text 1\")", "_____no_output_____" ] ], [ [ "import en_core_web_sm\nimport spacy\nfrom scipy.spatial.distance import cosine\nnlp = en_core_web_sm.load()", "_____no_output_____" ], [ "tokens = nlp(u'dog cat Beijing sad depressed couch sofa canine China Chinese France Paris banana')\n\nfor token1 in tokens:\n for token2 in tokens:\n if token1 != token2:\n print(f\" {token1} - {token2}: {1 - cosine(token1.vector, token2.vector)}\")", " dog - cat: 0.4564264118671417\n dog - Beijing: 0.1571345329284668\n dog - sad: 0.3079860210418701\n dog - depressed: 0.11385080963373184\n dog - couch: 0.5404482483863831\n dog - sofa: 0.33240464329719543\n dog - canine: 0.4633784294128418\n dog - China: 0.0019485866650938988\n dog - Chinese: 0.021737948060035706\n dog - France: 0.1857185661792755\n dog - Paris: 0.11601343750953674\n dog - banana: 0.3103766441345215\n cat - dog: 0.4564264118671417\n cat - Beijing: 0.25583046674728394\n cat - sad: 0.06742441654205322\n cat - depressed: 0.11650095880031586\n cat - couch: 0.37735462188720703\n cat - sofa: 0.414833128452301\n cat - canine: 0.45437631011009216\n cat - China: 0.14348067343235016\n cat - Chinese: 0.03203266113996506\n cat - France: 0.26350462436676025\n cat - Paris: 0.1825326830148697\n cat - banana: 0.4973468482494354\n Beijing - dog: 0.1571345329284668\n Beijing - cat: 
0.25583046674728394\n Beijing - sad: 0.16756749153137207\n Beijing - depressed: 0.020596839487552643\n Beijing - couch: 0.1764748990535736\n Beijing - sofa: 0.16452562808990479\n Beijing - canine: 0.26767805218696594\n Beijing - China: 0.5818219780921936\n Beijing - Chinese: 0.27865567803382874\n Beijing - France: 0.5205764174461365\n Beijing - Paris: 0.424411803483963\n Beijing - banana: 0.3193724453449249\n sad - dog: 0.3079860210418701\n sad - cat: 0.06742441654205322\n sad - Beijing: 0.16756749153137207\n sad - depressed: 0.3213621973991394\n sad - couch: 0.35177069902420044\n sad - sofa: 0.27763885259628296\n sad - canine: 0.2469976246356964\n sad - China: 0.14939850568771362\n sad - Chinese: 0.20662599802017212\n sad - France: 0.17732539772987366\n sad - Paris: 0.048603419214487076\n sad - banana: 0.27784866094589233\n depressed - dog: 0.11385080963373184\n depressed - cat: 0.11650095880031586\n depressed - Beijing: 0.020596839487552643\n depressed - sad: 0.3213621973991394\n depressed - couch: 0.2007930874824524\n depressed - sofa: 0.3122972249984741\n depressed - canine: 0.1812627762556076\n depressed - China: 0.1365271508693695\n depressed - Chinese: 0.21596072614192963\n depressed - France: 0.07575459033250809\n depressed - Paris: 0.027627602219581604\n depressed - banana: 0.0631336122751236\n couch - dog: 0.5404482483863831\n couch - cat: 0.37735462188720703\n couch - Beijing: 0.1764748990535736\n couch - sad: 0.35177069902420044\n couch - depressed: 0.2007930874824524\n couch - sofa: 0.4999978840351105\n couch - canine: 0.41085895895957947\n couch - China: 0.08625976741313934\n couch - Chinese: 0.21824336051940918\n couch - France: 0.2151881605386734\n couch - Paris: 0.21405968070030212\n couch - banana: 0.28344547748565674\n sofa - dog: 0.33240464329719543\n sofa - cat: 0.414833128452301\n sofa - Beijing: 0.16452562808990479\n sofa - sad: 0.27763885259628296\n sofa - depressed: 0.3122972249984741\n sofa - couch: 0.4999978840351105\n sofa - canine: 0.26518216729164124\n sofa - China: 0.12562435865402222\n sofa - Chinese: 0.11188281327486038\n sofa - France: 0.25162801146507263\n sofa - Paris: 0.17580696940422058\n sofa - banana: 0.40957292914390564\n canine - dog: 0.4633784294128418\n canine - cat: 0.45437631011009216\n canine - Beijing: 0.26767805218696594\n canine - sad: 0.2469976246356964\n canine - depressed: 0.1812627762556076\n canine - couch: 0.41085895895957947\n canine - sofa: 0.26518216729164124\n canine - China: 0.21173277497291565\n canine - Chinese: 0.1380724459886551\n canine - France: 0.27453306317329407\n canine - Paris: 0.13104529678821564\n canine - banana: 0.279147207736969\n China - dog: 0.0019485866650938988\n China - cat: 0.14348067343235016\n China - Beijing: 0.5818219780921936\n China - sad: 0.14939850568771362\n China - depressed: 0.1365271508693695\n China - couch: 0.08625976741313934\n China - sofa: 0.12562435865402222\n China - canine: 0.21173277497291565\n China - Chinese: 0.545533299446106\n China - France: 0.673493504524231\n China - Paris: 0.4618699848651886\n China - banana: 0.1769627034664154\n Chinese - dog: 0.021737948060035706\n Chinese - cat: 0.03203266113996506\n Chinese - Beijing: 0.27865567803382874\n Chinese - sad: 0.20662599802017212\n Chinese - depressed: 0.21596072614192963\n Chinese - couch: 0.21824336051940918\n Chinese - sofa: 0.11188281327486038\n Chinese - canine: 0.1380724459886551\n Chinese - China: 0.545533299446106\n Chinese - France: 0.4462940990924835\n Chinese - Paris: 0.554480791091919\n Chinese - banana: 
0.03465459123253822\n France - dog: 0.1857185661792755\n France - cat: 0.26350462436676025\n France - Beijing: 0.5205764174461365\n France - sad: 0.17732539772987366\n France - depressed: 0.07575459033250809\n France - couch: 0.2151881605386734\n France - sofa: 0.25162801146507263\n France - canine: 0.27453306317329407\n France - China: 0.673493504524231\n France - Chinese: 0.4462940990924835\n France - Paris: 0.3865208923816681\n France - banana: 0.3307388424873352\n Paris - dog: 0.11601343750953674\n Paris - cat: 0.1825326830148697\n Paris - Beijing: 0.424411803483963\n Paris - sad: 0.048603419214487076\n Paris - depressed: 0.027627602219581604\n Paris - couch: 0.21405968070030212\n Paris - sofa: 0.17580696940422058\n Paris - canine: 0.13104529678821564\n Paris - China: 0.4618699848651886\n Paris - Chinese: 0.554480791091919\n Paris - France: 0.3865208923816681\n Paris - banana: 0.2538565695285797\n banana - dog: 0.3103766441345215\n banana - cat: 0.4973468482494354\n banana - Beijing: 0.3193724453449249\n banana - sad: 0.27784866094589233\n banana - depressed: 0.0631336122751236\n banana - couch: 0.28344547748565674\n banana - sofa: 0.40957292914390564\n banana - canine: 0.279147207736969\n banana - China: 0.1769627034664154\n banana - Chinese: 0.03465459123253822\n banana - France: 0.3307388424873352\n banana - Paris: 0.2538565695285797\n" ] ], [ [ "# Finding Most Similar Words (Using Our Old Methods)", "_____no_output_____" ] ], [ [ "from sklearn.feature_extraction.text import CountVectorizer\n\n# inspect the default settings for CountVectorizer\nCountVectorizer()", "_____no_output_____" ], [ "reviews = open(\"poor_amazon_toy_reviews.txt\").readlines()\n\nvectorizer = CountVectorizer(ngram_range=(1, 1), \n stop_words=\"english\", \n max_features=500,token_pattern='(?u)\\\\b[a-zA-Z][a-zA-Z]+\\\\b')\nX = vectorizer.fit_transform(reviews)\n\ndata = pd.DataFrame(X.toarray(), columns=vectorizer.get_feature_names())\ndata.head()", "_____no_output_____" ], [ "from sklearn.metrics.pairwise import cosine_similarity\n\n# create similiarity matrix\nsimilarity_matrix = pd.DataFrame(cosine_similarity(data.T.values), \n columns=vectorizer.get_feature_names(),\n index=vectorizer.get_feature_names())", "_____no_output_____" ], [ "# unstack matrix into table\nsimilarity_table = similarity_matrix.rename_axis(None).rename_axis(None, axis=1).stack().reset_index()", "_____no_output_____" ], [ "# rename columns\nsimilarity_table.columns = [\"word1\", \"word2\", \"similarity\"]\nsimilarity_table.shape", "_____no_output_____" ], [ "similarity_table = similarity_table[similarity_table[\"similarity\"] < 0.99]\nsimilarity_table.shape", "_____no_output_____" ], [ "similarity_table.sort_values(by=\"similarity\", ascending=False).drop_duplicates(\n subset=\"similarity\", keep=\"first\").head(10)", "_____no_output_____" ], [ "top_500_words = vectorizer.get_feature_names()", "_____no_output_____" ] ], [ [ "# Exercise: Similar Words Using Word Embeddings", "_____no_output_____" ] ], [ [ "# load into spacy your top 500 words\n\ntokens = nlp(f'{\" \".join(top_500_words)}')", "_____no_output_____" ], [ "from itertools import product\n# create a list of similarity tuples\n\nsimilarity_tuples = []\n\nfor token1, token2 in product(tokens, repeat=2):\n similarity_tuples.append((token1, token2, token1.similarity(token2)))\n\nsimilarities = pd.DataFrame(similarity_tuples, columns=[\"word1\",\"word2\", \"score\"])\n", "_____no_output_____" ], [ "# find similar words\nsimilarities[similarities[\"score\"] < 1].sort_values(\n 
by=\"score\", ascending=False).drop_duplicates(\n subset=\"score\", keep=\"first\").head(5)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ] ]
cbff9c4b9049482d3190a043b82f817af8b6bebd
242,756
ipynb
Jupyter Notebook
notebooks/dev/n17_training_a_volume_estimator.ipynb
mtasende/Machine-Learning-Nanodegree-Capstone
69c5ca499fa52122d51131a0607bc199ec054cfd
[ "MIT" ]
1
2019-11-12T20:01:14.000Z
2019-11-12T20:01:14.000Z
notebooks/dev/n17_training_a_volume_estimator.ipynb
mtasende/Machine-Learning-Nanodegree-Capstone
69c5ca499fa52122d51131a0607bc199ec054cfd
[ "MIT" ]
null
null
null
notebooks/dev/n17_training_a_volume_estimator.ipynb
mtasende/Machine-Learning-Nanodegree-Capstone
69c5ca499fa52122d51131a0607bc199ec054cfd
[ "MIT" ]
null
null
null
154.228717
89,066
0.791844
[ [ [ "# In this notebook an estimator for the Volume will be trained. No hyperparameters will be searched for, and the ones from the 'Close' values estimator will be used instead.", "_____no_output_____" ] ], [ [ "# Basic imports\nimport os\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport datetime as dt\nimport scipy.optimize as spo\nimport sys\nfrom time import time\nfrom sklearn.metrics import r2_score, median_absolute_error\n\n%matplotlib inline\n\n%pylab inline\npylab.rcParams['figure.figsize'] = (20.0, 10.0)\n\n%load_ext autoreload\n%autoreload 2\n\nsys.path.append('../../')\n\nfrom sklearn.externals import joblib\nimport utils.preprocessing as pp\nimport predictor.feature_extraction as fe", "Populating the interactive namespace from numpy and matplotlib\n" ] ], [ [ "## Let's generate the datasets", "_____no_output_____" ] ], [ [ "def generate_one_set(params):\n # print(('-'*70 + '\\n {}, {} \\n' + '-'*70).format(params['base_days'].values, params['ahead_days'].values))\n tic = time()\n \n train_val_time = int(params['train_val_time'])\n base_days = int(params['base_days'])\n step_days = int(params['step_days'])\n ahead_days = int(params['ahead_days'])\n \n print('Generating: base{}_ahead{}'.format(base_days, ahead_days))\n pid = 'base{}_ahead{}'.format(base_days, ahead_days)\n \n # Getting the data\n data_df = pd.read_pickle('../../data/data_train_val_df.pkl')\n today = data_df.index[-1] # Real date\n print(pid + ') data_df loaded')\n\n # Drop symbols with many missing points\n data_df = pp.drop_irrelevant_symbols(data_df, params['GOOD_DATA_RATIO'])\n print(pid + ') Irrelevant symbols dropped.')\n \n # Generate the intervals for the predictor\n x, y = fe.generate_train_intervals(data_df, \n train_val_time, \n base_days, \n step_days,\n ahead_days, \n today, \n fe.feature_volume_one_to_one,\n target_feature=fe.VOLUME_FEATURE) \n print(pid + ') Intervals generated')\n \n # Drop \"bad\" samples and fill missing data\n x_y_df = pd.concat([x, y], axis=1)\n x_y_df = pp.drop_irrelevant_samples(x_y_df, params['SAMPLES_GOOD_DATA_RATIO'])\n x = x_y_df.iloc[:, :-1]\n y = x_y_df.iloc[:, -1]\n x = pp.fill_missing(x)\n print(pid + ') Irrelevant samples dropped and missing data filled.')\n \n # Pickle that\n x.to_pickle('../../data/x_volume_{}.pkl'.format(pid))\n y.to_pickle('../../data/y_volume_{}.pkl'.format(pid))\n \n toc = time()\n print('%s) %i intervals generated in: %i seconds.' 
% (pid, x.shape[0], (toc-tic)))\n \n return pid, x, y", "_____no_output_____" ], [ "best_params_df = pd.read_pickle('../../data/best_params_final_df.pkl').loc[1,:]\nto_drop = [\n 'model',\n 'mre',\n 'r2',\n 'x_filename',\n 'y_filename',\n 'train_days'\n]\nbest_params_df.drop(to_drop, inplace=True)\nbest_params_df", "_____no_output_____" ], [ "generate_one_set(best_params_df)", "Generating: base112_ahead1\nbase112_ahead1) data_df loaded\nbase112_ahead1) Irrelevant symbols dropped.\nbase112_ahead1) Intervals generated\nbase112_ahead1) Irrelevant samples dropped and missing data filled.\nbase112_ahead1) 219281 intervals generated in: 164 seconds.\n" ], [ "x_volume = pd.read_pickle('../../data/x_volume_base112_ahead1.pkl')\nprint(x_volume.shape)\nx_volume.head()", "(219281, 112)\n" ], [ "y_volume = pd.read_pickle('../../data/y_volume_base112_ahead1.pkl')\nprint(y_volume.shape)\ny_volume.head()", "(219281,)\n" ] ], [ [ "## Let's generate the test dataset, also", "_____no_output_____" ] ], [ [ "def generate_one_test_set(params, data_df):\n # print(('-'*70 + '\\n {}, {} \\n' + '-'*70).format(params['base_days'].values, params['ahead_days'].values))\n tic = time()\n \n train_val_time = int(params['train_val_time'])\n base_days = int(params['base_days'])\n step_days = int(params['step_days'])\n ahead_days = int(params['ahead_days'])\n \n print('Generating: base{}_ahead{}'.format(base_days, ahead_days))\n pid = 'base{}_ahead{}'.format(base_days, ahead_days)\n \n # Getting the data\n today = data_df.index[-1] # Real date\n print(pid + ') data_df loaded')\n\n # Drop symbols with many missing points\n y_train_df = pd.read_pickle('../../data/y_volume_{}.pkl'.format(pid))\n kept_symbols = y_train_df.index.get_level_values(1).unique().tolist()\n data_df = data_df.loc[:, (slice(None), kept_symbols)]\n print(pid + ') Irrelevant symbols dropped.')\n \n # Generate the intervals for the predictor\n x, y = fe.generate_train_intervals(data_df, \n train_val_time, \n base_days, \n step_days,\n ahead_days, \n today, \n fe.feature_volume_one_to_one,\n target_feature=fe.VOLUME_FEATURE) \n print(pid + ') Intervals generated')\n \n # Drop \"bad\" samples and fill missing data\n x_y_df = pd.concat([x, y], axis=1)\n x_y_df = pp.drop_irrelevant_samples(x_y_df, params['SAMPLES_GOOD_DATA_RATIO'])\n x = x_y_df.iloc[:, :-1]\n y = x_y_df.iloc[:, -1]\n x = pp.fill_missing(x)\n print(pid + ') Irrelevant samples dropped and missing data filled.')\n \n # Pickle that\n x.to_pickle('../../data/x_volume_{}_test.pkl'.format(pid))\n y.to_pickle('../../data/y_volume_{}_test.pkl'.format(pid))\n \n toc = time()\n print('%s) %i intervals generated in: %i seconds.' 
% (pid, x.shape[0], (toc-tic)))\n \n return pid, x, ", "_____no_output_____" ], [ "data_test_df = pd.read_pickle('../../data/data_test_df.pkl')\ngenerate_one_test_set(best_params_df, data_test_df)", "Generating: base112_ahead1\nbase112_ahead1) data_df loaded\nbase112_ahead1) Irrelevant symbols dropped.\nbase112_ahead1) Intervals generated\nbase112_ahead1) Irrelevant samples dropped and missing data filled.\nbase112_ahead1) 15957 intervals generated in: 2 seconds.\n" ], [ "x_volume_test = pd.read_pickle('../../data/x_volume_base112_ahead1_test.pkl')\nprint(x_volume_test.shape)\nx_volume_test.head()", "(15957, 112)\n" ], [ "y_volume_test = pd.read_pickle('../../data/y_volume_base112_ahead1_test.pkl')\nprint(y_volume_test.shape)\ny_volume_test.head()", "(15957,)\n" ] ], [ [ "## Let's train a predictor for the 'Volume' with the same hyperparameters as for the 'Close' one.", "_____no_output_____" ] ], [ [ "best_params_df = pd.read_pickle('../../data/best_params_final_df.pkl')", "_____no_output_____" ], [ "import predictor.feature_extraction as fe\nfrom predictor.linear_predictor import LinearPredictor\nimport utils.misc as misc\nimport predictor.evaluation as ev\n\nahead_days = 1\n\n# Get some parameters\ntrain_days = int(best_params_df.loc[ahead_days, 'train_days'])\nGOOD_DATA_RATIO, \\\ntrain_val_time, \\\nbase_days, \\\nstep_days, \\\nahead_days, \\\nSAMPLES_GOOD_DATA_RATIO, \\\nx_filename, \\\ny_filename = misc.unpack_params(best_params_df.loc[ahead_days,:])\n\npid = 'base{}_ahead{}'.format(base_days, ahead_days)\n\n# Get the datasets\nx_train = pd.read_pickle('../../data/x_volume_{}.pkl'.format(pid))\ny_train = pd.read_pickle('../../data/y_volume_{}.pkl'.format(pid))\nx_test = pd.read_pickle('../../data/x_volume_{}_test.pkl'.format(pid)).sort_index()\ny_test = pd.DataFrame(pd.read_pickle('../../data/y_volume_{}_test.pkl'.format(pid))).sort_index()\n\n# Let's cut the training set to use only the required number of samples\nend_date = x_train.index.levels[0][-1]\nstart_date = fe.add_market_days(end_date, -train_days)\nx_sub_df = x_train.loc[(slice(start_date,None),slice(None)),:]\ny_sub_df = pd.DataFrame(y_train.loc[(slice(start_date,None),slice(None))])\n\n# Create the estimator and train\nestimator = LinearPredictor()\nestimator.fit(x_sub_df, y_sub_df)\n\n# Get the training and test predictions\ny_train_pred = estimator.predict(x_sub_df)\ny_test_pred = estimator.predict(x_test)\n\n# Get the training and test metrics for each symbol\nmetrics_train = ev.get_metrics_df(y_sub_df, y_train_pred)\nmetrics_test = ev.get_metrics_df(y_test, y_test_pred)\n\n# Show the mean metrics\nmetrics_df = pd.DataFrame(columns=['train', 'test'])\nmetrics_df['train'] = metrics_train.mean()\nmetrics_df['test'] = metrics_test.mean()\nprint('Mean metrics: \\n{}\\n{}'.format(metrics_df,'-'*70))\n\n# Plot the metrics in time\nmetrics_train_time = ev.get_metrics_in_time(y_sub_df, y_train_pred, base_days + ahead_days)\nmetrics_test_time = ev.get_metrics_in_time(y_test, y_test_pred, base_days + ahead_days)\nplt.plot(metrics_train_time[2], metrics_train_time[0], label='train', marker='.')\nplt.plot(metrics_test_time[2], metrics_test_time[0], label='test', marker='.')\nplt.title('$r^2$ metrics')\nplt.legend()\nplt.figure()\nplt.plot(metrics_train_time[2], metrics_train_time[1], label='train', marker='.')\nplt.plot(metrics_test_time[2], metrics_test_time[1], label='test', marker='.')\nplt.title('MRE metrics')\nplt.legend()", "Mean metrics: \n train test\nr2 0.539703 0.480612\nmre 0.277789 
0.277234\n----------------------------------------------------------------------\n" ], [ "joblib.dump(estimator, '../../data/best_volume_predictor.pkl')", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ] ]
cbffa4d25266d3fbef5e68508b143442eb7b18af
22,634
ipynb
Jupyter Notebook
Model backlog/Inference/166-tweet-inference-5fold-roberta-base-91-exp3-las.ipynb
dimitreOliveira/Tweet-Sentiment-Extraction
0a775abe9a92c4bc2db957519c523be7655df8d8
[ "MIT" ]
11
2020-06-17T07:30:20.000Z
2022-03-25T16:56:01.000Z
Model backlog/Inference/166-tweet-inference-5fold-roberta-base-91-exp3-las.ipynb
dimitreOliveira/Tweet-Sentiment-Extraction
0a775abe9a92c4bc2db957519c523be7655df8d8
[ "MIT" ]
null
null
null
Model backlog/Inference/166-tweet-inference-5fold-roberta-base-91-exp3-las.ipynb
dimitreOliveira/Tweet-Sentiment-Extraction
0a775abe9a92c4bc2db957519c523be7655df8d8
[ "MIT" ]
null
null
null
31.923836
150
0.438367
[ [ [ "## Dependencies", "_____no_output_____" ] ], [ [ "import json, glob\nfrom tweet_utility_scripts import *\nfrom tweet_utility_preprocess_roberta_scripts_aux import *\nfrom transformers import TFRobertaModel, RobertaConfig\nfrom tokenizers import ByteLevelBPETokenizer\nfrom tensorflow.keras import layers\nfrom tensorflow.keras.models import Model", "_____no_output_____" ] ], [ [ "# Load data", "_____no_output_____" ] ], [ [ "test = pd.read_csv('/kaggle/input/tweet-sentiment-extraction/test.csv')\n\nprint('Test samples: %s' % len(test))\ndisplay(test.head())", "Test samples: 3534\n" ] ], [ [ "# Model parameters", "_____no_output_____" ] ], [ [ "input_base_path = '/kaggle/input/166-robertabase-last/'\nwith open(input_base_path + 'config.json') as json_file:\n config = json.load(json_file)\n\nconfig", "_____no_output_____" ], [ "base_path = '/kaggle/input/qa-transformers/roberta/'\n\nvocab_path = base_path + 'roberta-base-vocab.json'\nmerges_path = base_path + 'roberta-base-merges.txt'\nconfig['base_model_path'] = base_path + 'roberta-base-tf_model.h5'\nconfig['config_path'] = base_path + 'roberta-base-config.json'\n\nmodel_path_list = glob.glob(input_base_path + '*.h5')\nmodel_path_list.sort()\nprint('Models to predict:')\nprint(*model_path_list, sep = \"\\n\")", "Models to predict:\n/kaggle/input/166-robertabase-last/last_model_fold_1.h5\n/kaggle/input/166-robertabase-last/last_model_fold_2.h5\n/kaggle/input/166-robertabase-last/last_model_fold_3.h5\n/kaggle/input/166-robertabase-last/last_model_fold_4.h5\n/kaggle/input/166-robertabase-last/last_model_fold_5.h5\n" ] ], [ [ "# Tokenizer", "_____no_output_____" ] ], [ [ "tokenizer = ByteLevelBPETokenizer(vocab_file=vocab_path, merges_file=merges_path, \n lowercase=True, add_prefix_space=True)", "_____no_output_____" ] ], [ [ "# Pre process", "_____no_output_____" ] ], [ [ "test['text'].fillna('', inplace=True)\ntest[\"text\"] = test[\"text\"].apply(lambda x: x.lower())\ntest[\"text\"] = test[\"text\"].apply(lambda x: x.strip())\n\nx_test, x_test_aux, x_test_aux_2 = get_data_test(test, tokenizer, config['MAX_LEN'], preprocess_fn=preprocess_roberta_test)", "_____no_output_____" ] ], [ [ "# Model", "_____no_output_____" ] ], [ [ "module_config = RobertaConfig.from_pretrained(config['config_path'], output_hidden_states=False)\n\ndef model_fn(MAX_LEN):\n input_ids = layers.Input(shape=(MAX_LEN,), dtype=tf.int32, name='input_ids')\n attention_mask = layers.Input(shape=(MAX_LEN,), dtype=tf.int32, name='attention_mask')\n \n base_model = TFRobertaModel.from_pretrained(config['base_model_path'], config=module_config, name=\"base_model\")\n last_hidden_state, _ = base_model({'input_ids': input_ids, 'attention_mask': attention_mask})\n\n x_start = layers.Dropout(.1)(last_hidden_state) \n x_start = layers.Dense(1)(x_start)\n x_start = layers.Flatten()(x_start)\n y_start = layers.Activation('softmax', name='y_start')(x_start)\n\n x_end = layers.Dropout(.1)(last_hidden_state) \n x_end = layers.Dense(1)(x_end)\n x_end = layers.Flatten()(x_end)\n y_end = layers.Activation('softmax', name='y_end')(x_end)\n \n model = Model(inputs=[input_ids, attention_mask], outputs=[y_start, y_end])\n \n return model", "_____no_output_____" ] ], [ [ "# Make predictions", "_____no_output_____" ] ], [ [ "NUM_TEST_IMAGES = len(test)\ntest_start_preds = np.zeros((NUM_TEST_IMAGES, config['MAX_LEN']))\ntest_end_preds = np.zeros((NUM_TEST_IMAGES, config['MAX_LEN']))\n\nfor model_path in model_path_list:\n print(model_path)\n model = model_fn(config['MAX_LEN'])\n 
model.load_weights(model_path)\n \n test_preds = model.predict(get_test_dataset(x_test, config['BATCH_SIZE'])) \n test_start_preds += test_preds[0]\n test_end_preds += test_preds[1]", "/kaggle/input/166-robertabase-last/last_model_fold_1.h5\n/kaggle/input/166-robertabase-last/last_model_fold_2.h5\n/kaggle/input/166-robertabase-last/last_model_fold_3.h5\n/kaggle/input/166-robertabase-last/last_model_fold_4.h5\n/kaggle/input/166-robertabase-last/last_model_fold_5.h5\n" ] ], [ [ "# Post process", "_____no_output_____" ] ], [ [ "test['start'] = test_start_preds.argmax(axis=-1)\ntest['end'] = test_end_preds.argmax(axis=-1)\n\ntest['text_len'] = test['text'].apply(lambda x : len(x))\ntest['text_wordCnt'] = test['text'].apply(lambda x : len(x.split(' ')))\ntest[\"end\"].clip(0, test[\"text_len\"], inplace=True)\ntest[\"start\"].clip(0, test[\"end\"], inplace=True)\n\ntest['selected_text'] = test.apply(lambda x: decode(x['start'], x['end'], x['text'], config['question_size'], tokenizer), axis=1)\ntest[\"selected_text\"].fillna(test[\"text\"], inplace=True)", "_____no_output_____" ] ], [ [ "# Visualize predictions", "_____no_output_____" ] ], [ [ "display(test.head(10))", "_____no_output_____" ] ], [ [ "# Test set predictions", "_____no_output_____" ] ], [ [ "submission = pd.read_csv('/kaggle/input/tweet-sentiment-extraction/sample_submission.csv')\nsubmission['selected_text'] = test[\"selected_text\"]\nsubmission.to_csv('submission.csv', index=False)\nsubmission.head(10)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
cbffcc3eead2b64df0b1780c44b5b88cc0c1b09a
72,988
ipynb
Jupyter Notebook
src/03_stability_time_domain_specifications/aircraft_example.ipynb
2001engenharia/Control-System-Lectures
8523e42a62b23475b723370bd26937f16d65f046
[ "MIT" ]
2
2020-08-21T01:23:51.000Z
2020-11-02T12:49:14.000Z
src/03_stability_time_domain_specifications/aircraft_example.ipynb
2001engenharia/Control-System-Lectures
8523e42a62b23475b723370bd26937f16d65f046
[ "MIT" ]
null
null
null
src/03_stability_time_domain_specifications/aircraft_example.ipynb
2001engenharia/Control-System-Lectures
8523e42a62b23475b723370bd26937f16d65f046
[ "MIT" ]
1
2021-06-15T22:23:11.000Z
2021-06-15T22:23:11.000Z
249.105802
18,512
0.925549
[ [ [ "# import the package\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport control as ct\nfrom control.matlab import *", "_____no_output_____" ], [ "\"\"\"\nThis code shows the plane's response to longitudinal movement of a\naircraft when it is disturbed by external forces\nWind, changes in atmospheric conditions or pilots commands.\n\nReferences\nM. V. Cook, Flight Dynamics Principles: A Linear Systems Approach to\nAircraft Stability and Control.\n\"\"\"", "_____no_output_____" ], [ "# Open Loop\ns = tf('s')\n# long period\nH_long = 1/(s**2 + 0.017*s + 0.002)\npole(H_long)", "_____no_output_____" ], [ "damp(H_long)", "_____Eigenvalue______ Damping___ Frequency_\n -0.0085 +0.04391j 0.1901 0.04472\n -0.0085 -0.04391j 0.1901 0.04472\n" ], [ "# Short period\nH_short = 1/(s**2 + 1.74*s + 29.49)\npole(H_short)", "_____no_output_____" ], [ "damp(H_short)", "_____no_output_____" ], [ "# TF from the rest of the aircraft \nG1 = -20.6*(s + 0.013)*(s + 0.62)\n# TF from pitch for disturbance in elevator\naircraft = G1 * H_long * H_short", "_____no_output_____" ], [ "t, y1 = impulse(aircraft)\nplt.plot(y1,t)\nplt.title('Longitudinal moviment in elevator')", "_____no_output_____" ], [ "t, y2 = impulse(H_long)\nplt.plot(y2,t)\nplt.title('Phugoid moviment due an disturbance in elevator')", "_____no_output_____" ], [ "t, y3 = impulse(H_short)\nplt.plot(y3,t)\nplt.title('Short moviment due an disturbance in elevator')", "_____no_output_____" ], [ "# Closed Loop\nK = 0.03;\nairplane_long_Closed_Loop = feedback(K*aircraft,1)\n\nt, y_closed = step(airplane_long_Closed_Loop)\nplt.plot(y_closed,t)\nt, y_open = step(aircraft)\nplt.plot(y_open,t)\nplt.legend(['Closed Loop','Open Loop'])", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbffce28cc3aff79ed7b9b930a13d349a05965d8
15,294
ipynb
Jupyter Notebook
.ipynb_aml_checkpoints/17 - Monitor Data Drift-checkpoint2021-9-12-16-33-54Z.ipynb
JavierMedel/mslearn-dp100
dec1fb68cb3776878d158d24e100fa35ea31fd43
[ "MIT" ]
null
null
null
.ipynb_aml_checkpoints/17 - Monitor Data Drift-checkpoint2021-9-12-16-33-54Z.ipynb
JavierMedel/mslearn-dp100
dec1fb68cb3776878d158d24e100fa35ea31fd43
[ "MIT" ]
null
null
null
.ipynb_aml_checkpoints/17 - Monitor Data Drift-checkpoint2021-9-12-16-33-54Z.ipynb
JavierMedel/mslearn-dp100
dec1fb68cb3776878d158d24e100fa35ea31fd43
[ "MIT" ]
null
null
null
47.496894
862
0.567216
[ [ [ "# Monitoring Data Drift\n\nOver time, models can become less effective at predicting accurately due to changing trends in feature data. This phenomenon is known as *data drift*, and it's important to monitor your machine learning solution to detect it so you can retrain your models if necessary.\n\nIn this lab, you'll configure data drift monitoring for datasets.", "_____no_output_____" ], [ "## Before you start\n\nIn addition to the latest version of the **azureml-sdk** and **azureml-widgets** packages, you'll need the **azureml-datadrift** package to run the code in this notebook. Run the cell below to verify that it is installed.", "_____no_output_____" ] ], [ [ "!pip show azureml-datadrift", "_____no_output_____" ] ], [ [ "## Connect to your workspace\n\nWith the required SDK packages installed, now you're ready to connect to your workspace.\n\n> **Note**: If you haven't already established an authenticated session with your Azure subscription, you'll be prompted to authenticate by clicking a link, entering an authentication code, and signing into Azure.", "_____no_output_____" ] ], [ [ "from azureml.core import Workspace\n\n# Load the workspace from the saved config file\nws = Workspace.from_config()\nprint('Ready to work with', ws.name)", "_____no_output_____" ] ], [ [ "## Create a *baseline* dataset\n\nTo monitor a dataset for data drift, you must register a *baseline* dataset (usually the dataset used to train your model) to use as a point of comparison with data collected in the future. ", "_____no_output_____" ] ], [ [ "from azureml.core import Datastore, Dataset\n\n\n# Upload the baseline data\ndefault_ds = ws.get_default_datastore()\ndefault_ds.upload_files(files=['./data/diabetes.csv', './data/diabetes2.csv'],\n target_path='diabetes-baseline',\n overwrite=True, \n show_progress=True)\n\n# Create and register the baseline dataset\nprint('Registering baseline dataset...')\nbaseline_data_set = Dataset.Tabular.from_delimited_files(path=(default_ds, 'diabetes-baseline/*.csv'))\nbaseline_data_set = baseline_data_set.register(workspace=ws, \n name='diabetes baseline',\n description='diabetes baseline data',\n tags = {'format':'CSV'},\n create_new_version=True)\n\nprint('Baseline dataset registered!')", "_____no_output_____" ] ], [ [ "## Create a *target* dataset\n\nOver time, you can collect new data with the same features as your baseline training data. To compare this new data to the baseline data, you must define a target dataset that includes the features you want to analyze for data drift as well as a timestamp field that indicates the point in time when the new data was current -this enables you to measure data drift over temporal intervals. The timestamp can either be a field in the dataset itself, or derived from the folder and filename pattern used to store the data. 
For example, you might store new data in a folder hierarchy that consists of a folder for the year, containing a folder for the month, which in turn contains a folder for the day; or you might just encode the year, month, and day in the file name like this: *data_2020-01-29.csv*; which is the approach taken in the following code:", "_____no_output_____" ] ], [ [ "import datetime as dt\nimport pandas as pd\n\nprint('Generating simulated data...')\n\n# Load the smaller of the two data files\ndata = pd.read_csv('data/diabetes2.csv')\n\n# We'll generate data for the past 6 weeks\nweeknos = reversed(range(6))\n\nfile_paths = []\nfor weekno in weeknos:\n \n # Get the date X weeks ago\n data_date = dt.date.today() - dt.timedelta(weeks=weekno)\n \n # Modify data to ceate some drift\n data['Pregnancies'] = data['Pregnancies'] + 1\n data['Age'] = round(data['Age'] * 1.2).astype(int)\n data['BMI'] = data['BMI'] * 1.1\n \n # Save the file with the date encoded in the filename\n file_path = 'data/diabetes_{}.csv'.format(data_date.strftime(\"%Y-%m-%d\"))\n data.to_csv(file_path)\n file_paths.append(file_path)\n\n# Upload the files\npath_on_datastore = 'diabetes-target'\ndefault_ds.upload_files(files=file_paths,\n target_path=path_on_datastore,\n overwrite=True,\n show_progress=True)\n\n# Use the folder partition format to define a dataset with a 'date' timestamp column\npartition_format = path_on_datastore + '/diabetes_{date:yyyy-MM-dd}.csv'\ntarget_data_set = Dataset.Tabular.from_delimited_files(path=(default_ds, path_on_datastore + '/*.csv'),\n partition_format=partition_format)\n\n# Register the target dataset\nprint('Registering target dataset...')\ntarget_data_set = target_data_set.with_timestamp_columns('date').register(workspace=ws,\n name='diabetes target',\n description='diabetes target data',\n tags = {'format':'CSV'},\n create_new_version=True)\n\nprint('Target dataset registered!')", "_____no_output_____" ] ], [ [ "## Create a data drift monitor\n\nNow you're ready to create a data drift monitor for the diabetes data. The data drift monitor will run periodicaly or on-demand to compare the baseline dataset with the target dataset, to which new data will be added over time.\n\n### Create a compute target\n\nTo run the data drift monitor, you'll need a compute target. Run the following cell to specify a compute cluster (if it doesn't exist, it will be created).\n\n> **Important**: Change *your-compute-cluster* to the name of your compute cluster in the code below before running it! Cluster names must be globally unique names between 2 to 16 characters in length. Valid characters are letters, digits, and the - character.", "_____no_output_____" ] ], [ [ "from azureml.core.compute import ComputeTarget, AmlCompute\nfrom azureml.core.compute_target import ComputeTargetException\n\ncluster_name = \"your-compute-cluster\"\n\ntry:\n # Check for existing compute target\n training_cluster = ComputeTarget(workspace=ws, name=cluster_name)\n print('Found existing cluster, use it.')\nexcept ComputeTargetException:\n # If it doesn't already exist, create it\n try:\n compute_config = AmlCompute.provisioning_configuration(vm_size='STANDARD_DS11_V2', max_nodes=2)\n training_cluster = ComputeTarget.create(ws, cluster_name, compute_config)\n training_cluster.wait_for_completion(show_output=True)\n except Exception as ex:\n print(ex)\n ", "_____no_output_____" ] ], [ [ "> **Note**: Compute instances and clusters are based on standard Azure virtual machine images. 
For this exercise, the *Standard_DS11_v2* image is recommended to achieve the optimal balance of cost and performance. If your subscription has a quota that does not include this image, choose an alternative image; but bear in mind that a larger image may incur higher cost and a smaller image may not be sufficient to complete the tasks. Alternatively, ask your Azure administrator to extend your quota.\n\n### Define the data drift monitor\n\nNow you're ready to use a **DataDriftDetector** class to define the data drift monitor for your data. You can specify the features you want to monitor for data drift, the name of the compute target to be used to run the monitoring process, the frequency at which the data should be compared, the data drift threshold above which an alert should be triggered, and the latency (in hours) to allow for data collection.", "_____no_output_____" ] ], [ [ "from azureml.datadrift import DataDriftDetector\n\n# set up feature list\nfeatures = ['Pregnancies', 'Age', 'BMI']\n\n# set up data drift detector\nmonitor = DataDriftDetector.create_from_datasets(ws, 'mslearn-diabates-drift', baseline_data_set, target_data_set,\n compute_target=cluster_name, \n frequency='Week', \n feature_list=features, \n drift_threshold=.3, \n latency=24)\nmonitor", "_____no_output_____" ] ], [ [ "## Backfill the data drift monitor\n\nYou have a baseline dataset and a target dataset that includes simulated weekly data collection for six weeks. You can use this to backfill the monitor so that it can analyze data drift between the original baseline and the target data.\n\n> **Note** This may take some time to run, as the compute target must be started to run the backfill analysis. The widget may not always update to show the status, so click the link to observe the experiment status in Azure Machine Learning studio!", "_____no_output_____" ] ], [ [ "from azureml.widgets import RunDetails\n\nbackfill = monitor.backfill(dt.datetime.now() - dt.timedelta(weeks=6), dt.datetime.now())\n\nRunDetails(backfill).show()\nbackfill.wait_for_completion()", "_____no_output_____" ] ], [ [ "## Analyze data drift\n\nYou can use the following code to examine data drift for the points in time collected in the backfill run.", "_____no_output_____" ] ], [ [ "drift_metrics = backfill.get_metrics()\nfor metric in drift_metrics:\n print(metric, drift_metrics[metric])", "_____no_output_____" ] ], [ [ "You can also visualize the data drift metrics in [Azure Machine Learning studio](https://ml.azure.com) by following these steps:\n\n1. On the **Datasets** page, view the **Dataset monitors** tab.\n2. Click the data drift monitor you want to view.\n3. Select the date range over which you want to view data drift metrics (if the column chart does not show multiple weeks of data, wait a minute or so and click **Refresh**).\n4. Examine the charts in the **Drift overview** section at the top, which show overall drift magnitude and the drift contribution per feature.\n5. Explore the charts in the **Feature detail** section at the bottom, which enable you to see various measures of drift for individual features.\n\n> **Note**: For help understanding the data drift metrics, see the [How to monitor datasets](https://docs.microsoft.com/azure/machine-learning/how-to-monitor-datasets#understanding-data-drift-results) in the Azure Machine Learning documentation.\n\n## Explore further\n\nThis lab is designed to introduce you to the concepts and principles of data drift monitoring. 
To learn more about monitoring data drift using datasets, see the [Detect data drift on datasets](https://docs.microsoft.com/azure/machine-learning/how-to-monitor-datasets) in the Azure machine Learning documentation.\n\nYou can also collect data from published services and use it as a target dataset for datadrift monitoring. See [Collect data from models in production](https://docs.microsoft.com/azure/machine-learning/how-to-enable-data-collection) for details.\n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
cbffdcd6f04745c6461150c133545efb5cdb35b6
360,559
ipynb
Jupyter Notebook
.ipynb_checkpoints/P1-checkpoint.ipynb
zagfox/carP1
4ace67e47b80205d8e68d6fb2c049eb4c519befc
[ "MIT" ]
null
null
null
.ipynb_checkpoints/P1-checkpoint.ipynb
zagfox/carP1
4ace67e47b80205d8e68d6fb2c049eb4c519befc
[ "MIT" ]
null
null
null
.ipynb_checkpoints/P1-checkpoint.ipynb
zagfox/carP1
4ace67e47b80205d8e68d6fb2c049eb4c519befc
[ "MIT" ]
null
null
null
538.147761
117,548
0.941064
[ [ [ "# Self-Driving Car Engineer Nanodegree\n\n\n## Project: **Finding Lane Lines on the Road** \n***\nIn this project, you will use the tools you learned about in the lesson to identify lane lines on the road. You can develop your pipeline on a series of individual images, and later apply the result to a video stream (really just a series of images). Check out the video clip \"raw-lines-example.mp4\" (also contained in this repository) to see what the output should look like after using the helper functions below. \n\nOnce you have a result that looks roughly like \"raw-lines-example.mp4\", you'll need to get creative and try to average and/or extrapolate the line segments you've detected to map out the full extent of the lane lines. You can see an example of the result you're going for in the video \"P1_example.mp4\". Ultimately, you would like to draw just one line for the left side of the lane, and one for the right.\n\nIn addition to implementing code, there is a brief writeup to complete. The writeup should be completed in a separate file, which can be either a markdown file or a pdf document. There is a [write up template](https://github.com/udacity/CarND-LaneLines-P1/blob/master/writeup_template.md) that can be used to guide the writing process. Completing both the code in the Ipython notebook and the writeup template will cover all of the [rubric points](https://review.udacity.com/#!/rubrics/322/view) for this project.\n\n---\nLet's have a look at our first image called 'test_images/solidWhiteRight.jpg'. Run the 2 cells below (hit Shift-Enter or the \"play\" button above) to display the image.\n\n**Note: If, at any point, you encounter frozen display windows or other confounding issues, you can always start again with a clean slate by going to the \"Kernel\" menu above and selecting \"Restart & Clear Output\".**\n\n---", "_____no_output_____" ], [ "**The tools you have are color selection, region of interest selection, grayscaling, Gaussian smoothing, Canny Edge Detection and Hough Tranform line detection. You are also free to explore and try other techniques that were not presented in the lesson. Your goal is piece together a pipeline to detect the line segments in the image, then average/extrapolate them and draw them onto the image for display (as below). Once you have a working pipeline, try it out on the video stream below.**\n\n---\n\n<figure>\n <img src=\"examples/line-segments-example.jpg\" width=\"380\" alt=\"Combined Image\" />\n <figcaption>\n <p></p> \n <p style=\"text-align: center;\"> Your output should look something like this (above) after detecting line segments using the helper functions below </p> \n </figcaption>\n</figure>\n <p></p> \n<figure>\n <img src=\"examples/laneLines_thirdPass.jpg\" width=\"380\" alt=\"Combined Image\" />\n <figcaption>\n <p></p> \n <p style=\"text-align: center;\"> Your goal is to connect/average/extrapolate line segments to get output like this</p> \n </figcaption>\n</figure>", "_____no_output_____" ], [ "**Run the cell below to import some packages. If you get an `import error` for a package you've already installed, try changing your kernel (select the Kernel menu above --> Change Kernel). Still have problems? Try relaunching Jupyter Notebook from the terminal prompt. 
Also, consult the forums for more troubleshooting tips.** ", "_____no_output_____" ], [ "## Import Packages", "_____no_output_____" ] ], [ [ "#importing some useful packages\nimport matplotlib.pyplot as plt\nimport matplotlib.image as mpimg\nimport numpy as np\nimport cv2\n%matplotlib inline", "_____no_output_____" ] ], [ [ "## Read in an Image", "_____no_output_____" ] ], [ [ "#reading in an image\nimage = mpimg.imread('test_images/solidWhiteRight.jpg')\n\n#printing out some stats and plotting\nprint('This image is:', type(image), 'with dimensions:', image.shape)\nxSize = image.shape[1]\nprint(xSize)\nplt.imshow(image) # if you wanted to show a single color channel image called 'gray', for example, call as plt.imshow(gray, cmap='gray')", "('This image is:', <type 'numpy.ndarray'>, 'with dimensions:', (540, 960, 3))\n960\n" ] ], [ [ "## Ideas for Lane Detection Pipeline", "_____no_output_____" ], [ "**Some OpenCV functions (beyond those introduced in the lesson) that might be useful for this project are:**\n\n`cv2.inRange()` for color selection \n`cv2.fillPoly()` for regions selection \n`cv2.line()` to draw lines on an image given endpoints \n`cv2.addWeighted()` to coadd / overlay two images \n`cv2.cvtColor()` to grayscale or change color \n`cv2.imwrite()` to output images to file \n`cv2.bitwise_and()` to apply a mask to an image\n\n**Check out the OpenCV documentation to learn about these and discover even more awesome functionality!**", "_____no_output_____" ], [ "## Helper Functions", "_____no_output_____" ], [ "Below are some helper functions to help get you started. They should look familiar from the lesson!", "_____no_output_____" ] ], [ [ "import math\n\ndef grayscale(img):\n \"\"\"Applies the Grayscale transform\n This will return an image with only one color channel\n but NOTE: to see the returned image as grayscale\n (assuming your grayscaled image is called 'gray')\n you should call plt.imshow(gray, cmap='gray')\"\"\"\n return cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)\n # Or use BGR2GRAY if you read an image with cv2.imread()\n # return cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n \ndef canny(img, low_threshold, high_threshold):\n \"\"\"Applies the Canny transform\"\"\"\n return cv2.Canny(img, low_threshold, high_threshold)\n\ndef gaussian_blur(img, kernel_size):\n \"\"\"Applies a Gaussian Noise kernel\"\"\"\n return cv2.GaussianBlur(img, (kernel_size, kernel_size), 0)\n\ndef region_of_interest(img, vertices):\n \"\"\"\n Applies an image mask.\n \n Only keeps the region of the image defined by the polygon\n formed from `vertices`. The rest of the image is set to black.\n `vertices` should be a numpy array of integer points.\n \"\"\"\n #defining a blank mask to start with\n mask = np.zeros_like(img) \n \n #defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 
3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n \n #filling pixels inside the polygon defined by \"vertices\" with the fill color \n cv2.fillPoly(mask, vertices, ignore_mask_color)\n \n #returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image\n\n\ndef draw_lines(img, lines, color=[255, 0, 0], thickness=8):\n \"\"\"\n NOTE: this is the function you might want to use as a starting point once you want to \n average/extrapolate the line segments you detect to map out the full\n extent of the lane (going from the result shown in raw-lines-example.mp4\n to that shown in P1_example.mp4). \n \n Think about things like separating line segments by their \n slope ((y2-y1)/(x2-x1)) to decide which segments are part of the left\n line vs. the right line. Then, you can average the position of each of \n the lines and extrapolate to the top and bottom of the lane.\n \n This function draws `lines` with `color` and `thickness`. \n Lines are drawn on the image inplace (mutates the image).\n If you want to make the lines semi-transparent, think about combining\n this function with the weighted_img() function below\n \"\"\"\n \n # If line is on the left half, and slope is expected, extend it to the bottom\n new_lines = []\n for line in lines:\n for x1,y1,x2,y2 in line:\n slope = (y2-y1)/(x2-x1)\n if x1 < 480 and x2 < 480 and slope <-0.6:\n xb = min(x1, x2)\n yb = max(y1, y2)\n xn = math.floor(xb +(540-yb)/slope)\n yn = 540\n new_lines.append([[xb, yb, int(xn), yn]])\n if x1 > 480 and x2 > 480 and slope >0.6:\n xb = max(x1, x2)\n yb = max(y1, y2)\n xn = math.floor(xb +(540-yb)/slope)\n yn = 540\n new_lines.append([[int(xb), int(yb), int(xn), int(yn)]])\n \n \n \n \n for line in lines:\n for x1,y1,x2,y2 in line:\n cv2.line(img, (x1, y1), (x2, y2), color, thickness)\n \n \n for line in new_lines:\n for x1,y1,x2,y2 in line:\n cv2.line(img, (x1, y1), (x2, y2), color, thickness)\n \n \n \n \n\ndef hough_lines(img, rho, theta, threshold, min_line_len, max_line_gap):\n \"\"\"\n `img` should be the output of a Canny transform.\n \n Returns an image with hough lines drawn.\n \"\"\"\n lines = cv2.HoughLinesP(img, rho, theta, threshold, np.array([]), minLineLength=min_line_len, maxLineGap=max_line_gap)\n line_img = np.zeros((img.shape[0], img.shape[1], 3), dtype=np.uint8)\n draw_lines(line_img, lines)\n return line_img\n\n# Python 3 has support for cool math symbols.\n\ndef weighted_img(img, initial_img, a=0.8, b=1., c=0.):\n \"\"\"\n `img` is the output of the hough_lines(), An image with lines drawn on it.\n Should be a blank image (all black) with lines drawn on it.\n \n `initial_img` should be the image before any processing.\n \n The result image is computed as follows:\n \n initial_img * α + img * β + γ\n NOTE: initial_img and img must be the same shape!\n \"\"\"\n return cv2.addWeighted(initial_img, a, img, b, c)", "_____no_output_____" ] ], [ [ "## Test Images\n\nBuild your pipeline to work on the images in the directory \"test_images\" \n**You should make sure your pipeline works well on these images before you try the videos.**", "_____no_output_____" ] ], [ [ "import os\nos.listdir(\"test_images/\")", "_____no_output_____" ] ], [ [ "## Build a Lane Finding Pipeline\n\n", "_____no_output_____" ], [ "Build the pipeline and run your solution on all test_images. 
Make copies into the `test_images_output` directory, and you can use the images in your writeup report.\n\nTry tuning the various parameters, especially the low and high Canny thresholds as well as the Hough lines parameters.", "_____no_output_____" ] ], [ [ "import matplotlib.image as mpimg\nimport matplotlib.pyplot as plt\n\n# TODO: Build your pipeline that will draw lane lines on the test_images\n# then save them to the test_images_output directory.\ndef run_pipeline(image):\n plt.imshow(image)\n gray_img = grayscale(image)\n plt.imshow(gray_img)\n blur_gray_img = gaussian_blur(gray_img, 5)\n plt.imshow(blur_gray_img)\n canny_img = canny(blur_gray_img, 50, 150)\n plt.imshow(canny_img)\n \n vertices = np.array([[(0,540),(450, 320), (490, 320), (960,540)]], dtype=np.int32)\n mask_canny_img = region_of_interest(canny_img, vertices)\n plt.imshow(mask_canny_img)\n \n line_img = hough_lines(mask_canny_img, 2, np.pi/180, 15, 40, 15)\n plt.imshow(line_img)\n \n result = weighted_img(image, line_img)\n plt.imshow(result)\n \n return result\n\n\n#image = mpimg.imread('test_images/solidWhiteCurve.jpg')\n#image = mpimg.imread('test_images/solidYellowCurve.jpg')\n#image = mpimg.imread('test_images/solidYellowLeft.jpg')\n#image = mpimg.imread('test_images/solidYellowCurve2.jpg')\n#image = mpimg.imread('test_images/solidWhiteRight.jpg')\nimage = mpimg.imread('test_images/whiteCarLaneSwitch.jpg')\nresult = run_pipeline(image)\n", "_____no_output_____" ] ], [ [ "## Test on Videos\n\nYou know what's cooler than drawing lanes over images? Drawing lanes over video!\n\nWe can test our solution on two provided videos:\n\n`solidWhiteRight.mp4`\n\n`solidYellowLeft.mp4`\n\n**Note: if you get an import error when you run the next cell, try changing your kernel (select the Kernel menu above --> Change Kernel). Still have problems? Try relaunching Jupyter Notebook from the terminal prompt. Also, consult the forums for more troubleshooting tips.**\n\n**If you get an error that looks like this:**\n```\nNeedDownloadError: Need ffmpeg exe. 
\nYou can download it by calling: \nimageio.plugins.ffmpeg.download()\n```\n**Follow the instructions in the error message and check out [this forum post](https://discussions.udacity.com/t/project-error-of-test-on-videos/274082) for more troubleshooting tips across operating systems.**", "_____no_output_____" ] ], [ [ "# Import everything needed to edit/save/watch video clips\nfrom moviepy.editor import VideoFileClip\nfrom IPython.display import HTML", "_____no_output_____" ], [ "def process_image(image):\n # NOTE: The output you return should be a color image (3 channel) for processing video below\n # TODO: put your pipeline here,\n # you should return the final output (image where lines are drawn on lanes)\n result = run_pipeline(image)\n return result", "_____no_output_____" ] ], [ [ "Let's try the one with the solid white lane on the right first ...", "_____no_output_____" ] ], [ [ "white_output = 'test_videos_output/solidWhiteRight.mp4'\n## To speed up the testing process you may want to try your pipeline on a shorter subclip of the video\n## To do so add .subclip(start_second,end_second) to the end of the line below\n## Where start_second and end_second are integer values representing the start and end of the subclip\n## You may also uncomment the following line for a subclip of the first 5 seconds\n##clip1 = VideoFileClip(\"test_videos/solidWhiteRight.mp4\").subclip(0,5)\nclip1 = VideoFileClip(\"test_videos/solidWhiteRight.mp4\")\nwhite_clip = clip1.fl_image(process_image) #NOTE: this function expects color images!!\n%time white_clip.write_videofile(white_output, audio=False)", "[MoviePy] >>>> Building video test_videos_output/solidWhiteRight.mp4\n[MoviePy] Writing video test_videos_output/solidWhiteRight.mp4\n" ] ], [ [ "Play the video inline, or if you prefer find the video in your filesystem (should be in the same directory) and play it in your video player of choice.", "_____no_output_____" ] ], [ [ "HTML(\"\"\"\n<video width=\"960\" height=\"540\" controls>\n <source src=\"{0}\">\n</video>\n\"\"\".format(white_output))", "_____no_output_____" ] ], [ [ "## Improve the draw_lines() function\n\n**At this point, if you were successful with making the pipeline and tuning parameters, you probably have the Hough line segments drawn onto the road, but what about identifying the full extent of the lane and marking it clearly as in the example video (P1_example.mp4)? Think about defining a line to run the full length of the visible lane based on the line segments you identified with the Hough Transform. As mentioned previously, try to average and/or extrapolate the line segments you've detected to map out the full extent of the lane lines. You can see an example of the result you're going for in the video \"P1_example.mp4\".**\n\n**Go back and modify your draw_lines function accordingly and try re-running your pipeline. The new output should draw a single, solid line over the left lane line and a single, solid line over the right lane line. The lines should start from the bottom of the image and extend out to the top of the region of interest.**", "_____no_output_____" ], [ "Now for the one with the solid yellow lane on the left. 
This one's more tricky!", "_____no_output_____" ] ], [ [ "yellow_output = 'test_videos_output/solidYellowLeft.mp4'\n## To speed up the testing process you may want to try your pipeline on a shorter subclip of the video\n## To do so add .subclip(start_second,end_second) to the end of the line below\n## Where start_second and end_second are integer values representing the start and end of the subclip\n## You may also uncomment the following line for a subclip of the first 5 seconds\n##clip2 = VideoFileClip('test_videos/solidYellowLeft.mp4').subclip(0,5)\nclip2 = VideoFileClip('test_videos/solidYellowLeft.mp4')\nyellow_clip = clip2.fl_image(process_image)\n%time yellow_clip.write_videofile(yellow_output, audio=False)", "_____no_output_____" ], [ "HTML(\"\"\"\n<video width=\"960\" height=\"540\" controls>\n <source src=\"{0}\">\n</video>\n\"\"\".format(yellow_output))", "_____no_output_____" ] ], [ [ "## Writeup and Submission\n\nIf you're satisfied with your video outputs, it's time to make the report writeup in a pdf or markdown file. Once you have this Ipython notebook ready along with the writeup, it's time to submit for review! Here is a [link](https://github.com/udacity/CarND-LaneLines-P1/blob/master/writeup_template.md) to the writeup template file.\n", "_____no_output_____" ], [ "## Optional Challenge\n\nTry your lane finding pipeline on the video below. Does it still work? Can you figure out a way to make it more robust? If you're up for the challenge, modify your pipeline so it works with this video and submit it along with the rest of your project!", "_____no_output_____" ] ], [ [ "challenge_output = 'test_videos_output/challenge.mp4'\n## To speed up the testing process you may want to try your pipeline on a shorter subclip of the video\n## To do so add .subclip(start_second,end_second) to the end of the line below\n## Where start_second and end_second are integer values representing the start and end of the subclip\n## You may also uncomment the following line for a subclip of the first 5 seconds\n##clip3 = VideoFileClip('test_videos/challenge.mp4').subclip(0,5)\nclip3 = VideoFileClip('test_videos/challenge.mp4')\nchallenge_clip = clip3.fl_image(process_image)\n%time challenge_clip.write_videofile(challenge_output, audio=False)", "_____no_output_____" ], [ "HTML(\"\"\"\n<video width=\"960\" height=\"540\" controls>\n <source src=\"{0}\">\n</video>\n\"\"\".format(challenge_output))", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ] ]
cbffe06c4a7980798361ce5f425f0f022f32d469
2,866
ipynb
Jupyter Notebook
anagram_detector.ipynb
dkfreitag/algorithms-and-data-structures
3b10dddb1db66b1b1a6c3cb41c8bcdfc654fd89e
[ "MIT" ]
null
null
null
anagram_detector.ipynb
dkfreitag/algorithms-and-data-structures
3b10dddb1db66b1b1a6c3cb41c8bcdfc654fd89e
[ "MIT" ]
null
null
null
anagram_detector.ipynb
dkfreitag/algorithms-and-data-structures
3b10dddb1db66b1b1a6c3cb41c8bcdfc654fd89e
[ "MIT" ]
null
null
null
23.68595
94
0.495464
[ [ [ "# Anagram detector\n# Speed: O(n)\n\ndef anagramDetector(string_1, string_2):\n # initialize empty lists, one value for each letter in the alphabet\n char_cnt_1 = [0] * 26\n char_cnt_2 = [0] * 26\n\n letter_index = 0\n\n # loop through the first string and count the number of times each letter occurs\n for i in range(len(string_1)):\n # what is the index of the letter from 0 to 25?\n letter_index = ord(string_1[i]) - ord('a')\n\n # at the letter index (i.e. index 1 for 'b'), increase the count by 1 \n char_cnt_1[letter_index] += 1\n\n # repeat with string_2\n for i in range(len(string_2)):\n letter_index = ord(string_2[i]) - ord('a')\n char_cnt_2[letter_index] += 1\n\n j = 0\n is_anagram = True\n \n # loop through every letter\n while j < 26 and is_anagram:\n # is the count of times each letter occurs the same in both strings?\n if char_cnt_1[j] == char_cnt_2[j]:\n j += 1\n else:\n # they're not the same, it's not an anagram\n is_anagram = False\n\n return is_anagram\n", "_____no_output_____" ], [ "anagramDetector('yahoo', 'hooya')", "_____no_output_____" ], [ "anagramDetector('dude', 'bro')", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code" ] ]
cbffe1b77eae52a4b6d750490cfec3eacbfd53f8
3,151
ipynb
Jupyter Notebook
f0018/f0018-b.ipynb
soda-lab/Features
1d94c0f6343d36d9e5b1e150311e4e7ccdf0b7d9
[ "Unlicense" ]
null
null
null
f0018/f0018-b.ipynb
soda-lab/Features
1d94c0f6343d36d9e5b1e150311e4e7ccdf0b7d9
[ "Unlicense" ]
1
2019-05-05T03:45:14.000Z
2019-05-09T05:04:00.000Z
f0018/f0018-b.ipynb
soda-lab/Features
1d94c0f6343d36d9e5b1e150311e4e7ccdf0b7d9
[ "Unlicense" ]
null
null
null
21.731034
84
0.503967
[ [ [ "## Import Libraries", "_____no_output_____" ] ], [ [ "import os\nimport glob\nimport pandas as pd\nimport os", "_____no_output_____" ] ], [ [ "## Read Config File", "_____no_output_____" ] ], [ [ "import configparser\nconfig = configparser.ConfigParser()\nconfig.read('config.ini')\ninput_folder = config['DEFAULT']['Input-Folder']\noutput_folder = config['DEFAULT']['Output-Folder']", "_____no_output_____" ] ], [ [ "## Supporting Functions", "_____no_output_____" ] ], [ [ "# create foler if not exist\ndef create_folder(folder):\n if not os.path.exists(folder):\n os.makedirs(folder)", "_____no_output_____" ] ], [ [ "## Get Files", "_____no_output_____" ] ], [ [ "extension = 'csv'\nresult = glob.glob('{}*.{}'.format(input_folder,extension))", "_____no_output_____" ] ], [ [ "## Add Columns (Week-Num && Collection-Country)", "_____no_output_____" ] ], [ [ "create_folder(output_folder)\nfor file in sorted(result):\n df = pd.read_csv(file,header=0, encoding=\"UTF-8\")\n filename = os.path.basename(file)\n \n # get year_week\n y_week = \"_\".join(filename.split(\"_\", 2)[:2])\n week = y_week.split(\"_W\")[1]\n if len(week) == 1:\n week = \"0\"+week\n year_week = y_week[:6] + week\n \n # get collection_country\n collection_country = \"_\".join(filename.split(\"_\", 2)[2:])\n collection_country = collection_country.split(\"Twitter_\")[1][:-4]\n \n # add columns\n df[\"year-week\"] = year_week\n df[\"collection_country\"] = collection_country\n \n # write into csv\n df.to_csv(output_folder+filename, sep=',',index = False, encoding='UTF-8')", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
cbffeade6feaa78507456d07514afcc9b381ceac
6,211
ipynb
Jupyter Notebook
course/chapter4/section3_tf.ipynb
Shamik-07/notebooks
1783fb76ebbf697856863d83cf0ca0dfaf6c232e
[ "Apache-2.0" ]
null
null
null
course/chapter4/section3_tf.ipynb
Shamik-07/notebooks
1783fb76ebbf697856863d83cf0ca0dfaf6c232e
[ "Apache-2.0" ]
13
2021-12-08T15:48:48.000Z
2021-12-28T00:20:01.000Z
course/chapter4/section3_tf.ipynb
Shamik-07/notebooks
1783fb76ebbf697856863d83cf0ca0dfaf6c232e
[ "Apache-2.0" ]
null
null
null
21.946996
115
0.550153
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
cbfff8066a6bcfbc63dce44d4d39f1bf623555d1
4,549
ipynb
Jupyter Notebook
Alerta_de_datas.ipynb
wesleyssantos27/Primeiro-repositorio
3935c5352cd8889934dcb2917bab63530fff2d03
[ "MIT" ]
null
null
null
Alerta_de_datas.ipynb
wesleyssantos27/Primeiro-repositorio
3935c5352cd8889934dcb2917bab63530fff2d03
[ "MIT" ]
null
null
null
Alerta_de_datas.ipynb
wesleyssantos27/Primeiro-repositorio
3935c5352cd8889934dcb2917bab63530fff2d03
[ "MIT" ]
null
null
null
23.816754
250
0.430644
[ [ [ "<a href=\"https://colab.research.google.com/github/wesleyssantos27/Primeiro-repositorio/blob/master/Alerta_de_datas.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ] ], [ [ "from google.colab import auth\nimport pandas as pd\nfrom datetime import datetime\n\nauth.authenticate_user()\n\nimport gspread\nfrom oauth2client.client import GoogleCredentials\n\ngc = gspread.authorize(GoogleCredentials.get_application_default())", "_____no_output_____" ], [ "spreadsheet = gc.open('Datas')", "_____no_output_____" ], [ "page = spreadsheet.sheet1", "_____no_output_____" ], [ "page.row_values(1)", "_____no_output_____" ], [ "datas = pd.DataFrame(page.get_all_values())\ndatas.columns = datas.iloc[0]\ndatas = datas.drop(datas.index[0])\ndatas['Data'] = pd.to_datetime(datas['Data'])\ndisplay(datas['Data'])", "_____no_output_____" ], [ "today = datetime.now()\n\nfor data in datas['Data']:\n print(data.day,'/',data.month)", "17 / 10\n12 / 7\n27 / 7\n5 / 10\n" ], [ "print(today.day)\nprint(today.month)", "10\n5\n" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ] ]
cbfffc45f551a78167679362164a41714d8a4009
240,315
ipynb
Jupyter Notebook
notebooks/model_tweets.ipynb
thonic92/chal_TM
5b9535fdc9319773c6b7a4eac6225821d86defde
[ "MIT" ]
4
2018-10-04T07:57:58.000Z
2018-11-15T14:15:53.000Z
notebooks/model_tweets.ipynb
thonic92/chal_TM
5b9535fdc9319773c6b7a4eac6225821d86defde
[ "MIT" ]
null
null
null
notebooks/model_tweets.ipynb
thonic92/chal_TM
5b9535fdc9319773c6b7a4eac6225821d86defde
[ "MIT" ]
1
2019-03-19T16:21:41.000Z
2019-03-19T16:21:41.000Z
415.769896
125,284
0.555883
[ [ [ "[View in Colaboratory](https://colab.research.google.com/github/thonic92/chal_TM/blob/master/model_tweets.ipynb)", "_____no_output_____" ] ], [ [ "import json\nimport numpy as np\nimport pandas as pd\nimport tensorflow as tf\nfrom keras.models import Sequential\nfrom keras.layers import Dense\nfrom keras.layers import Dropout\nfrom keras.layers import LSTM\nfrom keras.callbacks import ModelCheckpoint\nfrom keras.utils import np_utils\nimport sys\nimport re\nimport unicodedata\nfrom collections import Counter\nimport nltk", "_____no_output_____" ], [ "with open(\"/content/gdrive/My Drive/json_datas_full.json\", \"r\", encoding=\"latin1\",errors='ignore' ) as read_file:\n data = json.load(read_file)", "_____no_output_____" ], [ "tweets = []", "_____no_output_____" ], [ "for i in range(len(data)):\n tweets.append(data[i]['text'].lower())\nprint(tweets[0:2])", "['jo 2024 : nouvelle étape vers la candidature de paris: la candidature de la capitale aux jeux de 2024 se met en\\x85 http://t.co/5nh0szbdg2', 'http://t.co/opwyuqtxhd hidalgo favorable à une candidature de paris aux jo de 2024 - challenges\\x85 http://t.co/ykivdlazbn']\n" ], [ "tweets_str = ' '.join(tweets)\ntweets_str=unicodedata.normalize('NFD',tweets_str).encode('ascii', 'ignore').decode(\"utf-8\")\nprint(tweets_str[0:1000])", "jo 2024 : nouvelle etape vers la candidature de paris: la candidature de la capitale aux jeux de 2024 se met en http://t.co/5nh0szbdg2 http://t.co/opwyuqtxhd hidalgo favorable a une candidature de paris aux jo de 2024 - challenges http://t.co/ykivdlazbn jo 2024 : hidalgo veut \"engager pleinement\" paris http://t.co/n3pj6xsycx \"hidalgo veut \"engager pleinement\" paris vers les jo 2024\" d'ici la les gymnases ne seront plus en greve..peut-etre. http://t.co/ec5lwxel7q jo 2024 : nouvelle etape vers la candidature deparis http://t.co/t1ogsrmyoy jo 2024 : nouvelle etape vers la candidature de paris http://t.co/t1ogsrmyoy http://t.co/ek6agnpkmh jo 2024: nouveau coup de fouet pour la candidature parisienne - 23/03/2015 - ladepeche.fr http://t.co/rdyfw7k1ep #paris2024 la maire de paris s'engage pleinement dans la candidature pour les ... 
- rtbf http://t.co/obxtpgh2jm http://t.co/opwyuqtxhd hidalgo favorable a une candidature de paris aux jo de 2024 - challenges http://t.co/iyiu3svbfy la maire de \n" ], [ "tweets_words = tweets_str.split(' ')\n#print(tweets_words[1:100])\ntype(tweets_words)\n#print(sorted(set(tweets_words)))\nprint(Counter(tweets_words).most_common()[0:100])\nprint(Counter(list(nltk.bigrams(tweets_words))).most_common()[0:100])", "[('pour', 24701), ('de', 21429), ('#paris2024', 17208), ('la', 14936), ('les', 14340), ('a', 12300), ('le', 11542), ('et', 8897), ('#jo2024', 8855), ('des', 7351), (':', 6437), ('!', 6222), ('du', 5767), ('en', 5578), ('', 5574), ('?', 4948), ('un', 4414), ('paris', 3920), ('au', 3314), ('une', 3306), ('sur', 3018), ('candidature', 3015), ('jo', 2925), ('que', 2905), ('avec', 2900), ('jeux', 2691), ('sport', 2650), ('2024', 2439), ('est', 2421), ('pas', 2399), ('aux', 2205), ('@paris2024', 2205), ('on', 2140), ('dans', 2097), ('#paris', 2029), ('qui', 2007), ('ce', 1987), (\"c'est\", 1923), ('vous', 1722), ('se', 1559), ('je', 1513), ('#sport', 1475), ('bien', 1391), ('via', 1360), ('par', 1343), ('-', 1334), ('nous', 1333), ('plus', 1263), ('tous', 1218), ('olympiques', 1157), ('ca', 1155), ('@anne_hidalgo', 1152), ('il', 1142), ('france', 1140), ('??', 1106), ('olympique', 1097), ('ne', 1094), ('merci', 1081), ('va', 1060), ('mais', 1037), ('faire', 964), ('#jo', 937), ('son', 886), ('sont', 859), ('comme', 813), ('&amp;', 808), ('notre', 805), ('projet', 803), ('ville', 768), ('tout', 764), ('si', 756), ('cette', 756), ('sa', 755), ('@paris', 747), ('ou', 746), ('fond', 741), ('sera', 727), ('#paris2024,', 722), ('@tonyestanguet', 716), ('felicitations', 716), ('francais', 703), ('etre', 696), ('vivre', 685), ('soutenir', 683), ('etes', 680), ('#olympicday', 664), ('y', 654), ('contre', 638), ('#enmodejo', 624), ('grand', 623), ('tres', 614), ('#paris2024\\npour', 608), ('fait', 603), ('avant', 569), ('soutien', 545), ('monde', 543), ('veut', 537), ('nos', 533), ('bravo', 525), ('#grandparis', 522)]\n[(('pour', 'les'), 4289), (('de', 'la'), 2907), (('pour', '#paris2024'), 2605), (('les', '#jo2024'), 2231), (('pour', 'la'), 2154), (('la', 'candidature'), 2137), (('pour', 'le'), 1593), (('candidature', 'de'), 1587), (('de', '#paris2024'), 1423), (('les', 'jo'), 1364), (('#jo2024', ':'), 1322), (('les', 'jeux'), 1243), (('a', 'la'), 1080), (('de', 'paris'), 1078), (('le', 'sport'), 1025), (('#paris2024', ':'), 970), (('jeux', 'olympiques'), 925), (('#paris2024', '!'), 795), (('des', 'jeux'), 794), (('du', 'sport'), 749), (('', ''), 692), (('a', 'fond'), 687), (('des', '#jo2024'), 682), (('a', 'paris'), 672), (('vous', 'etes'), 659), (('jo', '2024'), 626), (('la', 'france'), 626), (('vivre', 'a'), 599), (('#enmodejo', '!'), 599), (('felicitations', 'vous'), 598), (('etes', '#enmodejo'), 598), (('!', '#paris2024\\npour'), 598), (('#paris2024\\npour', 'vivre'), 598), (('#olympicday', ':'), 598), (('fond', '#olympicday'), 597), (('sur', 'le'), 566), (('#paris2024', 'et'), 565), (('pour', 'soutenir'), 553), (('et', 'le'), 549), (('#paris2024', 'pour'), 523), (('#jo2024', 'a'), 510), (('#paris2024', ''), 510), (('de', '#paris'), 497), (('#paris2024', '?'), 493), (('dans', 'le'), 490), (('!', '#paris2024'), 483), (('des', 'jo'), 479), (('2024', ':'), 464), (('sur', 'les'), 424), (('jo', 'de'), 421), (('paris', '2024'), 421), (('aux', '#jo2024'), 416), (('paris', 'pour'), 411), (('dans', 'la'), 409), (('la', 'ville'), 409), (('sur', 'la'), 405), (('a', '#paris2024'), 393), (('#jo2024', 
'#paris2024'), 384), ((':', 'la'), 380), (('los', 'angeles'), 380), ((':', 'le'), 371), (('et', 'de'), 365), (('de', '2024'), 352), (('et', 'les'), 351), (('jo', '#paris2024'), 349), (('pour', 'accueillir'), 347), (('que', 'les'), 345), (('jeux', 'de'), 341), (('#paris2024', '#jo2024'), 335), (('pour', 'un'), 332), (('tous', 'les'), 324), (('et', 'la'), 324), (('pour', 'des'), 321), (('sport', 'au'), 305), (('pour', 'une'), 304), (('#jo2024', ''), 301), ((':', 'https://t.co/w2gtuyu1lt'), 296), (('je', 'soutiens'), 290), (('les', 'epreuves'), 288), (('', '#paris2024'), 286), (('?', '#paris2024'), 285), (('pour', 'que'), 283), (('merci', 'a'), 277), (('#paris2024', 'a'), 275), (('accueillir', 'les'), 275), (('y', 'a'), 274), (('pour', '@paris2024'), 273), ((':', 'les'), 271), (('que', 'le'), 270), (('dans', 'les'), 269), (('#jo2024', '?'), 268), (('a', '#paris'), 266), (('2024', '!'), 265), (('avec', 'les'), 264), (('ceux', 'qui'), 263), (('pour', 'faire'), 255), (('ville', 'de'), 255), (('du', 'monde'), 255), (('pour', 'paris'), 254), (('sport', 'et'), 251)]\n" ], [ "# on retire les urls \ntweets_str = re.sub(\"(@[A-Za-z0-9]+)|([^0-9A-Za-z \\t])|(\\w+:\\/\\/\\S+)\",\" \", tweets_str)\ntweets_str = re.sub(\"paris2024\",\" \", tweets_str)\ntweets_str = re.sub(\"jo2024\",\" \", tweets_str)\ntweets_str = re.sub(\"jo 2024\",\" \", tweets_str)\ntweets_str = re.sub(\"jo de 2024\",\" \", tweets_str)\ntweets_str = re.sub(\"paris\",\" \", tweets_str)\ntweets_str = re.sub(\"sport\",\" \", tweets_str)\ntweets_str = re.sub(\"olympicday\",\" \", tweets_str)\ntweets_str = re.sub(\"enmodejo\",\" \", tweets_str)\ntweets_str = re.sub(\"grandparis\",\" \", tweets_str)\ntweets_str = re.sub(\"cio\",\" \", tweets_str)\ntweets_str = re.sub(\"jouerlejeu\",\" \", tweets_str)\ntweets_str = re.sub(\"jeuxolympiques\",\" \", tweets_str)\ntweets_str = re.sub(\"venezpartager\",\" \", tweets_str)\ntweets_str = re.sub(\"jo2024paris\",\" \", tweets_str)\ntweets_str = re.sub(\"jerevedesjeux\",\" \", tweets_str)\ntweets_str = re.sub(\"france\",\" \", tweets_str)\ntweets_str = re.sub(\"madeforsharing\",\" \", tweets_str)\ntweets_str = re.sub(\"rio2016\",\" \", tweets_str)\ntweets_str = re.sub(\"generation2024\",\" \", tweets_str)\ntweets_str = re.sub(\"gagnonsensemble\",\" \", tweets_str)\ntweets_str = re.sub(\"engager\",\" \", tweets_str)\ntweets_str = re.sub(\"pleinement\",\" \", tweets_str)\ntweets_str = re.sub(\"candidature\",\" \", tweets_str)\ntweets_str = re.sub(\"nouvelle etape\",\" \", tweets_str)\ntweets_str = re.sub(\"hidalgo veut\",\" \", tweets_str)\ntweets_str = re.sub(\"favorable\",\" \", tweets_str)\ntweets_str = re.sub(\"s engage\",\" \", tweets_str)\ntweets_str = ' '.join(tweets_str.split())\ntweets_str = tweets_str[0:300000]\nprint(tweets_str[0:1000])\nlen(tweets_str)\n", "vers la de la de la capitale aux jeux de 2024 se met en hidalgo a une de aux challenges vers les d ici la les gymnases ne seront plus en greve peut etre vers la de vers la de nouveau coup de fouet pour la ienne 23 03 2015 ladepeche fr la maire de dans la pour les rtbf hidalgo a une de aux challenges la maire de sengage dans la pour les constat lucide pour evenement durable boston conditionne sa au respect de 10 principes boston conditionne sa au respect de 10 principes fondamentaux boston2024 pour les hidalgo prete a via jeux olympiques vers la de la de la capitale hidalgo a une de aux challenges jeux olympiques vers la de french via nouveau coup de fouet pour la ienne logic 15 a 20 milliard de travaux amp 20 de retro commissions aux elus 
travaux pharaoniques inutiles voir le grece jeux olympiques vers la de 24 nouveau coup de fouet pour la ienne rtl belgique bientot une officielle de aux la maire de p medtaha mit nouveau coup de fouet pour la ienne nice matin anne hidalgo prefererait \n" ], [ "", "['la', 'de', 'la', 'de', 'la', 'capitale', 'aux', 'jeux', 'de', '2024', 'se', 'met', 'en', 'hidalgo', 'a', 'une', 'de', 'aux', 'challenges', 'vers', 'les', 'd', 'ici', 'la', 'les', 'gymnases', 'ne', 'seront', 'plus', 'en', 'greve', 'peut', 'etre', 'vers', 'la', 'de', 'vers', 'la', 'de', 'nouveau', 'coup', 'de', 'fouet', 'pour', 'la', 'ienne', '23', '03', '2015', 'ladepeche', 'fr', 'la', 'maire', 'de', 'dans', 'la', 'pour', 'les', 'rtbf', 'hidalgo', 'a', 'une', 'de', 'aux', 'challenges', 'la', 'maire', 'de', 'sengage', 'dans', 'la', 'pour', 'les', 'constat', 'lucide', 'pour', 'evenement', 'durable', 'boston', 'conditionne', 'sa', 'au', 'respect', 'de', '10', 'principes', 'boston', 'conditionne', 'sa', 'au', 'respect', 'de', '10', 'principes', 'fondamentaux', 'boston2024', 'pour', 'les', 'hidalgo']\n['0', '000', '02', '03', '05', '06', '07au', '09', '1', '10', '100', '1000', '100e', '100m', '103', '10eme', '10h', '10km', '10m', '10mds', '10milliards', '10x', '11', '1100', '110m', '115', '11md', '12', '120', '12mds', '13', '130', '130m', '135', '13eme', '14', '14juillet', '14juillet2016', '15', '150', '15000', '16', '16h', '16h30', '17', '170', '17000', '18', '18eme', '18h', '18h30', '18h50', '18milliards', '19', '1924', '1932', '1936', '196', '1972', '1984', '1985', '1992', '1998', '19h', '1compte', '1e', '1er', '1ere', '1eres', '1ers', '1m', '1mm', '1pantheonsorbonne', '1resultat', '1slogan', '1st', '1vaste', '2', '20', '200', '2000', '200000km', '2003', '2004', '2005', '2010', '2012', '2014', '2015', '2016', '2017', '2018', '2019', '202', '2020', '2022', '2023', '2023rugbyworldcup', '2024', '2024secondes', '2025', '2026', '2027', '2028', '2030', '2032', '205', '20h00', '20h2017', '20h55', '20k', '20minutes', '21', '210', '2108', '21h', '22', '22m', '23', '234', '23h', '24', '250', '25ans', '26', '260000', '27', '270', '270000', '270k', '28', '28th', '29', '2constater', '2e', '2eme', '2h', '2km', '2la', '2milliards', '2passion', '2plus', '2x', '3', '30', '300', '3000', '30km', '32', '33', '34', '34m', '35', '350', '36', '360', '38', '39', '395', '39e', '3e', '3eme', '3md', '3minutes', '3mois', '3x3', '4', '400', '400m', '40e', '41', '45', '450', '45326', '457', '45957', '46021', '46047', '46053', '46149', '46181', '46747', '46811', '46907', '46971', '47', '47442', '48137', '48521', '48768', '48832', '49', '49152', '49287', '4e', '4mds', '4mp1ldm', '4x10km', '5', '50', '500', '5000', '500m', '500millions', '519', '52', '53', '532', '56', '57', '59', '5e', '5g', '5k', '5km', '5m93', '6', '60', '600', '6000', '620', '65mds', '66minutes', '6ans', '6e', '6eme', '6mds', '6milliards', '6nations', '6s', '6semaines', '7', '70', '700000', '70k', '72', '75', '77', '78000', '7801', '7ans', '7mds', '8', '80', '800', '83', '84', '85000', '8h', '8h10', '8mds', '9', '90', '90k', '92', '93', '94', '95', '97', '971', '98', '9h', 'a', 'a1', 'a35mn', 'a380', 'abandonne', 'abandonner', 'abat', 'abattage', 'abeillezvous', 'aberration', 'abonnes', 'aborde', 'aborder', 'aboutir', 'abracadabrantesque', 'abri', 'absence', 'absengorgement', 'absolument', 'ac', 'acceder', 'accederont', 'acceler', 'accelerateur', 'acceleration', 'accelere', 'accelerent', 'accelerer', 'acceleres', 'accellerateur', 'acceptent', 'accepter', 'accepterait', 'acces', 
'accessibilite', 'accessible', 'accessibles', 'accidents', 'accompagne', 'accompagnee', 'accompagnement', 'accompagner', 'accompli', 'accomplie', 'accord', 'accordde', 'accordent', 'accorder', 'accorhotels', 'accred', 'accredites', 'accroissement', 'accu', 'accueil', 'accueille', 'accueillera', 'accueilli', 'accueillir', 'accueillis', 'accuellir', 'accumule', 'accuse', 'achat', 'achete', 'acheter', 'acheve', 'achevee', 'acheves', 'acquerir', 'acquis', 'acte', 'acter', 'actes', 'acteu', 'acteur', 'acteurdedemain', 'acteurs', 'action', 'actions', 'activations', 'active', 'activent', 'activite', 'actors', 'actrice', 'actu', 'actualite', 'actualites', 'actuel', 'actuelles', 'actus', 'ad', 'adapter', 'addictions', 'addition', 'additionnelles', 'additionnels', 'adeptes', 'adequat', 'adidas', 'adieu', 'adj', 'adjoint', 'adjoints', 'admin', 'administration', 'administres', 'admirable', 'adn', 'adopte', 'adopterons', 'adoptez', 'adoption', 'adoraaaable', 'adore', 'adp', 'adresse', 'adversaire', 'aere', 'aerogare', 'aeroport', 'aeroports', 'aff', 'affaire', 'affaires', 'affecte', 'affiche', 'afficher', 'affichetoncoeur', 'afin', 'afondlaforme', 'afp', 'afrique', 'after', 'ag', 'agapes', 'agathe', 'agenda', 'agents', 'ages', 'agi', 'agir', 'agissez', 'agit', 'agonie', 'agora', 'agrandir', 'agrandissement', 'agreable', 'agress', 'agresse', 'agressee', 'agression', 'agressions', 'agricoles', 'aguerris', 'agure', 'ah', 'ahahahaha', 'ai', 'aia', 'aide', 'aider', 'aie', 'aient', 'aille', 'ailleurs', 'aime', 'aimerai', 'aimerais', 'aimerait', 'aimes', 'ainsi', 'air', 'airbnb', 'airbus', 'aises', 'ait', 'ajouter', 'ajoutons', 'ajustements', 'akbar', 'al', 'aladin', 'alain', 'alarmant', 'alaune', 'albert', 'alberti', 'alberville', 'album', 'aleatoires', 'alecole', 'aler', 'alerte', 'alertent', 'alerter', 'alertes', 'alertons', 'alfortville', 'algoe', 'aligne', 'alignes', 'allah', 'allaire', 'allait', 'allemagne', 'allemand', 'allemands', 'aller', 'alles', 'allez', 'allezlesbleu', 'allezlesbleus', 'alliage', 'alliages', 'alliant', 'allie', 'allies', 'allons', 'alloue', 'allstarperche', 'allument', 'allumer', 'allumera', 'alo', 'alors', 'alsace', 'alternatif', 'altitude', 'alwayssmile', 'amas', 'amateur', 'amateurs', 'ambassadeur', 'ambassadeurs', 'ambassadrice', 'ambiance', 'ambiances', 'ambitieux', 'ambition', 'ambitions', 'ambroise', 'ame', 'amelie', 'amelioration', 'ameliore', 'ameliorer', 'amenagement', 'amenagements', 'amenages', 'amende', 'americain', 'americains', 'amerigovespucci', 'amerique', 'amf21', 'ami', 'amir', 'amis', 'amman', 'ammour', 'amont', 'amos', 'amour', 'amp', 'amputer', 'an', 'analyse', 'analyser', 'anati', 'ancien', 'ancienne', 'anciens', 'and', 'andalouse', 'anerie', 'anestaps', 'angelamerkel', 'angeles', 'angers', 'anglais', 'angrus', 'animateur', 'animateurs', 'animation', 'animaux', 'anime', 'animer', 'animera', 'ann', 'anna', 'annaelle', 'anne', 'anneaux', 'annecy', 'annecy2018', 'annee', 'annees', 'annehidalgo', 'anniversaire', 'annoncait', 'annonce', 'annoncee', 'annoncees', 'annoncent', 'annoncer', 'annoncera', 'annoncerons', 'annonces', 'annoncez', 'annuel', 'annuelle', 'annuels', 'annulation', 'annule', 'annulee', 'annuler', 'annules', 'annus', 'anon', 'anonymement', 'anormalement', 'anouck', 'anous', 'anouslesjeux', 'ans', 'antennes', 'anthony', 'anti', 'anticiper', 'anticorruption', 'antiderapage', 'antidopage', 'antifa', 'antilles', 'antiprog', 'anxieux', 'aom', 'aout', 'apero', 'apl', 'apollon', 'apparaitre', 'appareil', 'apparemment', 'appartement', 'appartements', 
'appauvrit', 'appel', 'appellent', 'appels', 'applaudir', 'applaudisse', 'applaudissements', 'apple', 'appli', 'apporte', 'apporter', 'apprecier', 'apprehension', 'apprend', 'apprendre', 'apprends', 'apprenti', 'appro', 'approche', 'appropriation', 'appropriee', 'approuves', 'appui', 'apres', 'aquatique', 'ar', 'aralympique', 'arbalettes', 'arbitrages', 'arcenciel', 'archi', 'architecte', 'architectes', 'architectural', 'architecture', 'archives', 'arcu', 'arcuei', 'ard', 'ardoise', 'are', 'arena', 'arenas', 'arg', 'argent', 'argenteuil', 'argument', 'arguments', 'arle', 'arlesien', 'armand', 'armee', 'arnaque', 'arpajon', 'arrangements', 'arras', 'arret', 'arrete', 'arreter', 'arretera', 'arretez', 'arri', 'arriere', 'arrivant', 'arrive', 'arrivent', 'arriver', 'arrivera', 'arrives', 'arrivons', 'arrondissement', 'arrondt', 'art', 'arte', 'arthur', 'artic', 'article', 'articles', 'articulation', 'articuler', 'artificiel', 'artisan', 'artisanat', 'artiste', 'artistes', 'artistique', 'as', 'asiatique', 'asie', 'askestanguet', 'asmonaco', 'asnelles', 'asphyxie', 'assemblee', 'assez', 'assigne', 'assimiles', 'assises', 'assiste', 'assister', 'assmann', 'asso', 'associat', 'associatif', 'association', 'associations', 'associee', 'associees', 'associer', 'associes', 'assos', 'assumer', 'assurancechomage', 'assure', 'assurement', 'assurer', 'assurera', 'assures', 'asthme', 'astuce', 'atelier', 'ateliers', 'atheltes', 'athenes', 'athetisme', 'athle', 'athlete', 'athletes', 'athletisme', 'atout', 'atouts', 'attachement', 'attaque', 'attaques', 'atteindre', 'atteint', 'attend', 'attendais', 'attendait', 'attendant', 'attendent', 'attendez', 'attendons', 'attendre', 'attends', 'attendu', 'attendues', 'attendus', 'attentas', 'attentat', 'attentats', 'attente', 'attention', 'atterrissage', 'attirer', 'attractifs', 'attractive', 'attractives', 'attractivite', 'attrib', 'attribue', 'attribuee', 'attribuer', 'attribues', 'attribution', 'attributions', 'attriste', 'au', 'aubaine', 'aubervillers', 'aubervilliers', 'aubry', 'aucun', 'aucune', 'audace', 'audencia', 'audiovisuel', 'audiovisuels', 'audois', 'augmentation', 'augmente', 'augmenter', 'augmentera', 'aujour', 'aujourd', 'aujourdhui', 'aulsc', 'aupres', 'aura', 'aurai', 'auraient', 'aurais', 'aurait', 'aurez', 'aurons', 'auront', 'aussi', 'autant', 'auteuil', 'authentique', 'autissier', 'auto', 'autoderision', 'autographes', 'automatique', 'automne', 'autonomes', 'autorises', 'autoroutes', 'autou', 'autour', 'autr', 'autre', 'autrement', 'autres', 'auverlot', 'aux', 'av', 'avaient', 'avais', 'avait', 'avaler', 'avalises', 'avan', 'avance', 'avancees', 'avancement', 'avancer', 'avances', 'avant', 'avantage', 'avc', 'ave', 'avec', 'avecpoleemploi', 'avenir', 'aventure', 'avertissement', 'avertit', 'aveu', 'aveuglement', 'aveugles', 'avez', 'avgeekfr', 'avi', 'avion', 'avions', 'aviron', 'avis', 'avocat', 'avoir', 'avons', 'avouer', 'avril', 'axe', 'axes', 'ayant', 'b', 'ba', 'bach', 'back', 'bacques', 'bagmontana', 'bagnolet', 'bah', 'baignable', 'baigner', 'baignoire', 'bail', 'bailleurs', 'baiser', 'baisse', 'baisses', 'balade', 'baladent', 'balance', 'ball', 'balle', 'banal', 'bande', 'bands', 'banksters', 'banlieu', 'banlieue', 'bannir', 'banquier', 'bar', 'barbarie', 'barcamp', 'barnum', 'barre', 'barrieres', 'barriereshonte', 'barry', 'barsacq', 'barth', 'bas', 'base', 'bases', 'bashing', 'bassin', 'bastilleday', 'bat', 'bataclan', 'bataille', 'bateau', 'batiment', 'batteur', 'battre', 'battue', 'baule', 'baver', 'bayle', 'bcp', 'bd', 
'bdnumerique', 'beach', 'beachvolley', 'bearn', 'beau', 'beauc', 'beaucoup', 'beaujoire', 'beaumont', 'beaute', 'beaux', 'bec', 'beckers', 'bedier', 'beh', 'bejing2022', 'bel', 'belgique', 'belle', 'belles', 'ben', 'benabb', 'bene', 'benef', 'benefice', 'benefices', 'beneficie', 'beneficier', 'beneficieront', 'benevolat', 'benevole', 'benevoles', 'benjamin', 'benrabia', 'bercy', 'berge', 'bergesdelaseine', 'berlin', 'bernard', 'bernardlapasset', 'berny', 'besoin', 'besoins', 'best', 'bestteam', 'betail', 'beton', 'betonnage', 'bets', 'bfmfoot', 'bfmtv', 'biais', 'bianche', 'biarritz', 'biathlon', 'bidon', 'bie', 'bien', 'bienetre', 'bientat', 'bientot', 'bienvenue', 'bienvenuealhotel', 'bigballerbrand', 'bigdata', 'bilan', 'billard', 'billet', 'billets', 'binom', 'biodiversite', 'bis', 'bisous', 'bites', 'biz', 'bizarre', 'bizarrement', 'bjarkeingels', 'bjr', 'black', 'blague', 'blanc', 'blanche', 'blancs', 'blason', 'ble', 'blesoise', 'bleu', 'bleues', 'bleus', 'blocagerungis', 'bloch', 'block', 'blocks', 'blog', 'blonde', 'bloquer', 'blr', 'blueroom', 'blues', 'bmx', 'bnp', 'bobards', 'bois', 'boite', 'boites', 'bol', 'bon', 'bonheur', 'boniface', 'bonjour', 'bonne', 'bonnes', 'bonnnn', 'bonplanmaraude', 'bons', 'bonsoir', 'boom', 'booskapress', 'boost', 'booster', 'bord', 'bordayl', 'bordeaux', 'bordel', 'borne', 'bosse', 'bosser', 'boston', 'boston2024', 'botanique', 'bouadjadja', 'boubou', 'bouc', 'bouche', 'boucher', 'bouches', 'bouchez', 'bouchons', 'boucle', 'bouclera', 'bouees', 'bouge', 'bouger', 'boule', 'boules', 'boulot', 'bourdes', 'bourdindirect', 'bourg', 'bourget', 'bourgogne', 'bouse', 'bouseux', 'bout', 'bouyg', 'box', 'boxe', 'boy', 'boycott', 'boycotte', 'boys', 'bp', 'braderie', 'braderiedelille', 'braderielille', 'braillard', 'brainstorming', 'bras', 'brasilia', 'braun', 'bravo', 'brazil', 'break', 'breaking', 'bref', 'brennus', 'bresil', 'bresiliens', 'bresse', 'bretagne', 'brevetblanc', 'brexit', 'brigittemacron', 'brillamment', 'brillante', 'brille', 'briller', 'brise', 'brizitte', 'brochette', 'bruit', 'bruits', 'brut', 'bruts', 'bschool', 'btp', 'btw', 'bubble', 'budapest', 'budapest2024', 'buddies', 'budge', 'budget', 'budgetaire', 'budgetaires', 'budgetise', 'budgets', 'budjet', 'buffet', 'bureau', 'bureaux', 'bus', 'business', 'but', 'by', 'bye', 'c', 'c8', 'ca', 'cabinets', 'cables', 'cabochon', 'cac40', 'cache', 'cachent', 'cacher', 'cadeau', 'cadre', 'cadrer', 'cadres', 'caen', 'cafe', 'cahuzac', 'caisse', 'calais', 'calamiteux', 'calendrier', 'californie', 'calme', 'camille', 'camp', 'campagne', 'campagnes', 'campus', 'canal', 'canape', 'canard', 'canaux', 'cand', 'candid', 'candidat', 'candidate', 'candidater', 'candidates', 'candidats', 'candidatu', 'canellas', 'cannabis', 'cannes', 'canoe', 'canoekayak', 'canope', 'canousconcerne', 'cantwait', 'canyon', 'cap', 'capable', 'capables', 'capacite', 'capharnaum', 'capitale', 'capitalistes', 'capoter', 'caprice', 'capteur', 'car', 'carabine', 'carettej', 'carinne', 'carriere', 'carte', 'cartes', 'cartographie', 'carton', 'cartonne', 'cartoon', 'cas', 'casaitalia', 'cascad', 'cashinvestigation', 'casier', 'casser', 'casseurs', 'castagnette', 'castaldi', 'castaner', 'castex', 'cat', 'cata', 'catastrophe', 'catastrophes', 'categorie', 'cathedrales', 'catholic', 'catholique', 'cauchemard', 'cause', 'causer', 'causes', 'caution', 'cavecchi', 'cavous', 'cazeneuve', 'cbre', 'cc', 'cccp', 'cci', 'cd93', 'cdanslair', 'cdenquete', 'cdg', 'cdgexpress', 'cdm', 'cdm2019', 'cdm2023', 'cdmhand2017', 'cdos', 'cdos21', 'ce', 
'ceci', 'cecifoot', 'ceeso', 'ceinture', 'ceis', 'cela', 'celebr', 'celebration', 'celebrations', 'celebre', 'celebree', 'celebrer', 'celle', 'celui', 'censee', 'censees', 'centaines', 'centenaire', 'centrale', 'centre', 'centreaquatique', 'centres', 'ceo', 'cer', 'cerebres', 'ceremo', 'ceremonie', 'ceremoniedecloture', 'ceremoniedouverture', 'ceremonies', 'cergy', 'certain', 'certainement', 'certaines', 'certains', 'certification', 'certifier', 'ces', 'cesar', 'cesar2018', 'cesars2018', 'cesse', 'cessent', 'cesser', 'cest', 'cestlahontequand', 'cet', 'ceta', 'cetait', 'cette', 'ceux', 'cfcross', 'cgedd', 'cgt', 'ch', 'chacun', 'chaine', 'chaines', 'chaises', 'chaleureux', 'challenge', 'challenges', 'chambre', 'chambres', 'champ', 'champagne', 'champion', 'championnat', 'championnats', 'championne', 'championnes', 'champions', 'champs', 'chance', 'chances', 'change', 'changement', 'changer', 'changera', 'changez', 'chanson', 'chansons', 'chant', 'chantage', 'chantait', 'chante', 'chanter', 'chantera', 'chanterait', 'chanteur', 'chanteurs', 'chantier', 'chantiers', 'chantilly', 'chaos', 'chapeau', 'chapelle', 'chapoy', 'chaque', 'charente', 'charge', 'chargee', 'chargees', 'charges', 'chargeyourphone', 'charia', 'charles', 'charniere', 'charte', 'chasse', 'chasseurs', 'chateau', 'chatelet', 'chaud', 'chauds', 'chauvine', 'checker', 'chef', 'chefs', 'chelem', 'chemin', 'cheminots', 'cheque', 'chequier', 'cher', 'cherche', 'chercher', 'chere', 'cheres', 'cheri', 'cheris', 'chers', 'cheveux', 'chevre', 'chez', 'chiant', 'chiantesque', 'chic', 'chiffrage', 'chiffredelasemaine', 'chiffres', 'children', 'chine', 'chinois', 'chipster', 'chirac', 'chirurgien', 'chloe', 'choc', 'choisi', 'choisir', 'choisirait', 'choisis', 'choisissant', 'choisissent', 'choisissez', 'choix', 'chomage', 'chomeurs', 'chomis', 'choper', 'choquant', 'choque', 'choquee', 'choree', 'choregraphe', 'chose', 'choses', 'chpts', 'chretiens', 'christ', 'christin', 'christine', 'christinearron', 'christophe', 'ci', 'cible', 'cice', 'cidj', 'ciel', 'cimade', 'cinema', 'cingles', 'cinq', 'cinquieme', 'circ', 'circenses', 'circo', 'circonscription', 'circulaire', 'circulaires', 'circulation', 'circuler', 'circus', 'cirque', 'citation', 'cite', 'citedeleconomie', 'citizen', 'citizens', 'citoyenne', 'citoyennete', 'citoyens', 'city', 'cityzenmap', 'civil', 'civile', 'ck', 'clair', 'claire', 'clairement', 'clairoix', 'clairs', 'clameurs', 'clandestins', 'claque', 'claquee', 'claquement', 'claquer', 'clarte', 'clash', 'classe', 'classement', 'classes', 'classiquematin', 'claude', 'claudeonesta', 'cle', 'clef', 'clefs', 'clermontferrand', 'clhebdo', 'cliche', 'cliches', 'clichy', 'client', 'clients', 'climat', 'climateaction', 'climatiques', 'clip', 'cliquez', 'clivages', 'cloture', 'cloturer', 'club', 'clubs', 'cluster', 'cluzel', 'cm', 'cm1', 'cm2', 'cmdijon', 'cmp', 'cnds', 'cnosf', 'cnp', 'co', 'co2', 'coca', 'coeur', 'coffret', 'coherence', 'coicault', 'coin', 'coince', 'cojo', 'cola', 'colere', 'collab', 'collaborations', 'collaborative', 'collect', 'collectes', 'collectif', 'collectifs', 'collection', 'collective', 'collectivement', 'collectives', 'collectivite', 'collectivites', 'college', 'collegiens', 'collegues', 'colline', 'collomb', 'colloque', 'colloquefiphfp', 'collterr', 'collterrs', 'colomb', 'colombes', 'colombes2024', 'colonie', 'colonne', 'colossal', 'com', 'comb', 'combat', 'combats', 'combattant', 'combien', 'combine', 'comble', 'combler', 'combo', 'comedie', 'comingsoon', 'comite', 'comites', 'comm', 'commande', 
'commander', 'commandes', 'comme', 'commencant', 'commence', 'commencent', 'commencer', 'commencons', 'comment', 'commentair', 'commentaire', 'commentaires', 'commerce', 'commercial', 'commercialisable', 'commissi', 'commission', 'commissions', 'commun', 'commune', 'communication', 'communion', 'communique', 'communiquer', 'communistes', 'communs', 'compacite', 'compagnie', 'companies', 'company', 'comparait', 'compense', 'competence', 'competences', 'competitif', 'competitio', 'competition', 'competitions', 'compiegne', 'complement', 'complementaires', 'complementarite', 'complet', 'complete', 'completement', 'compost', 'comprend', 'comprendra', 'comprendre', 'comprends', 'compris', 'compromet', 'compromis', 'compta', 'comptable', 'compte', 'compter', 'comptes', 'comptez', 'comptons', 'con', 'concentre', 'concentrer', 'concept', 'concerme', 'concernant', 'concernees', 'concernent', 'concernes', 'concert', 'concertation', 'concerts', 'concevoir', 'concevons', 'conclue', 'conclusion', 'conclusions', 'concourir', 'concours', 'concret', 'concretement', 'concrets', 'concu', 'concurrence', 'concurrent', 'concurrents', 'condit', 'condition', 'conditionne', 'conditionnel', 'conditions', 'conduire', 'conf', 'confbb', 'conference', 'confiance', 'confiant', 'confier', 'confirmation', 'confirme', 'confirmee', 'confirmees', 'confirmer', 'confirmera', 'confirmes', 'confnpa', 'confond', 'confortable', 'confrontee', 'conges', 'congolais', 'congrats', 'congres', 'congresmev', 'conjuguer', 'connais', 'connaissant', 'connaissent', 'connait', 'connaitre', 'connectee', 'connectees', 'connerie', 'conneries', 'connu', 'cons', 'consacre', 'consacree', 'consciences', 'conseil', 'conseilde', 'conseille', 'conseiller', 'conseillere', 'conseilregionalcentrevaldeloire', 'conseils', 'consensus', 'consequence', 'consequences', 'conservant', 'considerable', 'considerablement', 'considerables', 'consideration', 'considere', 'consigny', 'consiste', 'consortium', 'consorts', 'constat', 'constater', 'constates', 'construction', 'construire', 'construisait', 'construisez', 'construisons', 'construit', 'construite', 'construites', 'consultant', 'consultation', 'consulting', 'conte', 'content', 'contente', 'contents', 'contenu', 'contestataire', 'context', 'contexte', 'continent', 'continue', 'continuer', 'continuerai', 'continuons', 'contorsionnent', 'contourner', 'contraignant', 'contraignante', 'contraire', 'contraires', 'contrat', 'contre', 'contrer', 'contri', 'contrib', 'contribuable', 'contribuables', 'contribue', 'contribuer', 'contributeurs', 'contribution', 'contributions', 'controle', 'convaincre', 'convaincu', 'convention', 'convie', 'conviee', 'conviendra', 'convier', 'convivialite', 'convoque', 'convoquer', 'cool', 'coop', 'cooperation', 'coordination', 'cop21', 'copains', 'copie', 'copieux', 'copresidents', 'coree', 'cornel', 'corporate', 'correctement', 'correspondance', 'corrompus', 'corruption', 'cortege', 'cosma', 'cost', 'costauds', 'cotations', 'cote', 'cotes', 'cou', 'coubertin', 'coucou', 'coudray', 'coue', 'couilles', 'couillons', 'couine', 'couler', 'couleu', 'couleur', 'couleurs', 'coulisses', 'couloir', 'coup', 'coupe', 'coupedumonde2018', 'coupedumonde2019', 'coupedumonde2023', 'coupedumondefeminine2019', 'coupedumonderugby2023', 'coupee', 'coupes', 'couple', 'coups', 'cour', 'courage', 'courant', 'courir', 'courneuve', 'courrier', 'cours', 'course', 'courses', 'court', 'courtois', 'courtoisie', 'courts', 'couru', 'cout', 'couta', 'coute', 'couter', 'couteux', 'couts', 'couvert', 'couverture', 
'couvrir', 'couvrirait', 'cpena', 'cr', 'crack', 'crade', 'crai', 'craignait', 'craindre', 'crains', 'craint', 'craintes', 'cran', 'cranes', 'craquer', 'creatif', 'creatifs', 'creation', 'creations', 'creative', 'creativite', 'creatrices', 'credibiliser', 'credit', 'cree', 'creees', 'creen', 'creer', 'crees', 'creil', 'creneaux', 'crepsidf', 'crever', 'cri', 'cridf', 'crier', 'critique', 'crocodiles', 'croire', 'crois', 'croise', 'croisee', 'croissance', 'croissant', 'croit', 'croitre', 'croix', 'cros', 'cross', 'crosscountry', 'crossisf', 'crossunss', 'croyable', 'croyais', 'croyez', 'croyons', 'cru', 'cruciale', 'crue', 'crypto', 'cryptomonnaie', 'csa', 'csg', 'ct', 'cubem', 'cuit', 'cul', 'culbutes', 'culbuto', 'culture', 'culturel', 'culturels', 'cup', 'cur', 'curieux', 'curry', 'cybersecurite', 'cyclables', 'cycle', 'cyclismactu', 'cyclisme', 'cyclos', 'cymes', 'd', 'd1', 'd1f', 'd2s', 'dab', 'daccompagnement', 'daccord', 'daccueillir', 'dactions', 'dadministration', 'daesh', 'daft', 'daigne', 'dallard', 'daltons', 'damateurs', 'damenagement', 'dames', 'damien', 'damso', 'dan', 'danemark', 'danger', 'dangers', 'daniel', 'danielle', 'dans', 'danse', 'danser', 'dapprendre', 'data', 'datagueule', 'datant', 'dataviz', 'date', 'dates', 'dathletisme', 'datouts', 'dattente', 'daugmenter', 'dautres', 'davantage', 'davenir', 'david', 'davis', 'davoi', 'davoir', 'day', 'ddouillet', 'de', 'deadline', 'dealers', 'deash', 'deau', 'debat', 'debats', 'debattre', 'debattue', 'debordements', 'debout', 'debut', 'debutent', 'debuts', 'decalage', 'decale', 'decalee', 'decathlon', 'decede', 'decembre', 'decents', 'decernee', 'dechet', 'decide', 'decidement', 'decider', 'decideur', 'decideurs', 'decisif', 'decision', 'decisions', 'declare', 'declasser', 'declencheur', 'declin', 'decloisonne', 'decoles', 'decor', 'decoree', 'decors', 'decoupe', 'decouvertedesoi', 'decouvre', 'decouvrez', 'decouvrir', 'decroche', 'decrocher', 'decroches', 'decrypte', 'decu', 'decyclisme', 'dedans', 'dedies', 'dedommagement', 'deduis', 'defaillant', 'defaillante', 'defaite', 'defaut', 'defend', 'defendon', 'defendre', 'defense', 'defi', 'deficit', 'deficits', 'defilees', 'definit', 'definitif', 'definitive', 'definitivement', 'defiscaliser', 'degage', 'degats', 'degoute', 'degoutee', 'degouter', 'degradation', 'degradee', 'degueu', 'deguisee', 'dehors', 'deja', 'dejeuner', 'dela', 'delai', 'delais', 'delavillenie', 'delegation', 'delegations', 'delegue', 'deleves', 'delicats', 'delinquants', 'delire', 'delocalises', 'deloge', 'delyx', 'dem', 'demain', 'demande', 'demandent', 'demander', 'demandez', 'demarche', 'demarre', 'demarrer', 'demenager', 'demesure', 'demi', 'demission', 'democratie', 'democraties', 'democratique', 'democratiser', 'demolir', 'demonstration', 'demontre', 'demontrer', 'demotions', 'demploi', 'demplois', 'demultiplier', 'deni', 'denier', 'deniers', 'denis', 'denismasseglia', 'denoncer', 'denormandie', 'dense', 'dents', 'denvoi', 'depart', 'departement', 'departemental', 'departementaux', 'departements', 'depasse', 'depasseme', 'depassement', 'depassements', 'depasser', 'depasserait', 'depen', 'depense', 'depenser', 'depenses', 'depensons', 'depit', 'deplacement', 'deplacent', 'deploiement', 'deploiera', 'deplorable', 'deplorent', 'deployer', 'deposee', 'deposer', 'depuis', 'depute', 'dequipements', 'der', 'derapage', 'derapages', 'derape', 'deraper', 'derive', 'derives', 'dernier', 'derniere', 'dernieres', 'derniers', 'derogation', 'derogations', 'derogatoire', 'deroulement', 'deroulent', 'derouler', 
'deroulera', 'derouleront', 'derriere', 'des', 'desapprobation', 'desastre', 'desastreuse', 'descalade', 'descendants', 'descendre', 'descente', 'desden', 'desesperance', 'deshabiller', 'designation', 'designe', 'designee', 'designer', 'designsprint', 'desinformation', 'desir', 'desirable', 'desirer', 'desole', 'desordre', 'desormais', 'dessert', 'desservira', 'dessin', 'dessine', 'dessinent', 'dessous', 'dessus', 'destination', 'destine', 'destinee', 'destinees', 'destines', 'destruction', 'det', 'detablis', 'detablissement', 'detail', 'details', 'dete', 'detection', 'detections', 'detendus', 'detenus', 'determinants', 'detour', 'detr', 'detre', 'detriment', 'detrousse', 'dette', 'dettepublique', 'dettes', 'detudes', 'deur', 'deurope', 'deuros', 'deux', 'deuxieme', 'dev', 'devait', 'devant', 'devastateur', 'developpement', 'developpementdurable', 'developper', 'devenir', 'devenu', 'devenue', 'devenus', 'devienne', 'deviennent', 'devient', 'devoilant', 'devoile', 'devoiler', 'devoir', 'devons', 'devra', 'devraient', 'devrait', 'devriez', 'devront', 'dexcedent', 'dexception', 'dexperts', 'dexpropriation', 'dg', 'dhabitants', 'dhabitude', 'dhafer', 'dhaiby', 'dheloise', 'diable', 'dialoguer', 'diars', 'dici', 'dictature', 'did', 'didier', 'dieppe', 'dieu', 'dieux', 'differences', 'differents', 'difficile', 'difficiles', 'difficulte', 'diffuse', 'diffuser', 'diffuseurs', 'diffusion', 'digere', 'digerer', 'digital', 'digitalisation', 'digne', 'dignement', 'dijon', 'dijonla', 'dimanche', 'dimaria', 'dimension', 'diminution', 'din', 'dinclusion', 'diner', 'dinfographie', 'dinfr', 'dinfrastructures', 'dingue', 'dinguerie', 'dinner', 'dinquietude', 'dintegration', 'dinterlogement', 'dinvestissement', 'dinvestissements', 'dinvitation', 'dionysiennes', 'diplo', 'diplomatie', 'dir', 'dira', 'dirais', 'dirait', 'dircab', 'dircom', 'dire', 'direct', 'directan', 'directement', 'directeur', 'directeurs', 'directidf', 'direction', 'directpr', 'directrice', 'dirigeants', 'dirigents', 'dis', 'disais', 'disait', 'discipline', 'disciplines', 'discordantes', 'discours', 'discriminations', 'discussion', 'discussions', 'discuter', 'disent', 'disleaanne', 'disney', 'disneyland', 'disneytousenforme', 'disparaissen', 'dispendieux', 'dispo', 'disponible', 'dispose', 'disposer', 'disposi', 'dispositif', 'dispositions', 'disqualification', 'distance', 'distraction', 'dit', 'dites', 'divers', 'diverses', 'diversite', 'dix', 'dizaines', 'dizieux', 'dm', 'dma', 'dnb', 'do', 'doffres', 'doigts', 'dois', 'doit', 'doivent', 'domaine', 'dominer', 'dommage', 'dompte', 'donc', 'donnait', 'donnant', 'donne', 'donnee', 'donnees', 'donnent', 'donner', 'donnez', 'dons', 'dont', 'dopage', 'doper', 'dopes', 'dopinion', 'dor', 'dorer', 'dorganisation', 'dorganiser', 'dorientation', 'dorment', 'dormir', 'doses', 'doss', 'dossier', 'dossiers', 'dotation', 'dotations', 'dotee', 'douane', 'double', 'doubler', 'doublette', 'doubs', 'douce', 'doucesoiree', 'douche', 'doue', 'doute', 'doutes', 'douteux', 'douverture', 'douvrir', 'dptdunord', 'drames', 'dranceen', 'dranceens', 'drancy', 'drapeau', 'drapeaux', 'dream', 'droit', 'droite', 'droits', 'droitstv', 'drole', 'drolement', 'ds', 'dsden', 'dsl', 'dtn', 'du', 'du71', 'dubai', 'dubi', 'ducasse', 'dugny', 'dumont', 'dun', 'dune', 'duo', 'dupe', 'dur', 'durabilite', 'durable', 'durables', 'durant', 'dure', 'durs', 'dutilite', 'dvb', 'dynamique', 'dynamiser', 'dysfonctionnement', 'e', 'e1', 'e1matin', 'e1we', 'eau', 'eboueurs', 'ecart', 'ecarte', 'ecarts', 'echange', 'echangent', 
'echangeons', 'echanger', 'echanges', 'echapper', 'echeances', 'echec', 'echelle', 'echogeo', 'echoient', 'echos', 'eclaircissements', 'eclat', 'eclatant', 'eco', 'ecole', 'ecoles', 'ecoliers', 'ecologie', 'ecologique', 'ecologistes', 'ecolos', 'ecomobilite', 'econom', 'economie', 'economiecirculaire', 'economies', 'economiqu', 'economique', 'economiquement', 'economiques', 'economiser', 'economiste', 'ecope', 'ecosysteme', 'ecoutant', 'ecoute', 'ecouter', 'ecoutez', 'ecran', 'ecrans', 'ecrire', 'ecrit', 'ectac', 'edf', 'edition', 'edito', 'editorialistes', 'educ', 'educa', 'educatif', 'education', 'educative', 'eduquer', 'eenice2018', 'efface', 'effacer', 'effectivement', 'effectuent', 'effel', 'effervescence', 'effet', 'effets', 'efficace', 'effort', 'efforts', 'egalement', 'egalite', 'egalitefh', 'eglise', 'ego', 'eh', 'eiffel', 'ejectable', 'el', 'elaboree', 'elan', 'elancourt', 'election', 'electri', 'electrique', 'electron', 'elegant', 'elegante', 'elemen', 'element', 'elements', 'elephants', 'eleves', 'eleveurs', 'elh', 'elite', 'elixir', 'elle', 'elles', 'ellysee', 'eloigne', 'eloignent', 'eloquent', 'else', 'elu', 'elus', 'elyas', 'elysee', 'elysees', 'elyx', 'emanciper', 'embarquement', 'embaucheront', 'embellir', 'eme', 'emerveille', 'emettent', 'emigration', 'emilie', 'emis', 'emissaire', 'emission', 'emissions', 'emmanuel', 'emmanuelle', 'emmanuelmacron', 'emmerder', 'emotions', 'emouvante', 'emouvoir', 'empiff', 'emplacement', 'emploi', 'emploiformation', 'emplois', 'empochent', 'emporte', 'emportees', 'emprunta', 'emprunte', 'en', 'encadrer', 'enchaine', 'encheres', 'encore', 'encou', 'encourageant', 'encourageantes', 'encouragements', 'encourager', 'end', 'endettee', 'endettement', 'endetter', 'endettes', 'endirect', 'endormi', 'endormir', 'endroit', 'enedis', 'energetique', 'energie', 'energies', 'energique', 'enfants', 'enfer', 'enfermer', 'enfin', 'enfouissement', 'enfumage', 'eng', 'engage', 'engagee', 'engagees', 'engagement', 'engagements', 'engagez', 'engendree', 'engie', 'engraisse', 'engraissent', 'engrus', 'enjeu', 'enjeux', 'enjeuxemploi', 'enjeuxemplois', 'enmarche', 'enorme', 'enormement', 'enquete', 'enrages', 'enregistrements', 'enrichir', 'enrichissent', 'enrichit', 'enseigne', 'ensemble', 'ensuite', 'entamee', 'entend', 'entendre', 'entendrez', 'entendu', 'entier', 'entiere', 'entierement', 'entouree', 'entrainait', 'entraine', 'entrainement', 'entrainements', 'entrainer', 'entrainons', 'entre', 'entree', 'entrepreneur', 'entrepreneurial', 'entrepreneurs', 'entreprise', 'entreprises', 'entrer', 'entretien', 'entretiendra', 'env', 'envahisse', 'enveloppe', 'envie', 'environ', 'environnement', 'environnemental', 'environnementale', 'envisage', 'envisagee', 'envisager', 'envisagerait', 'envisages', 'envoi', 'envoie', 'envole', 'envoye', 'envoyer', 'envoyez', 'ep', 'epargner', 'epaulee', 'epee', 'ephemere', 'epide', 'epinaysurseine', 'epine', 'episode', 'epluche', 'epoque', 'epreuve', 'epreuvedevoileolympique', 'epreuves', 'epreuvesequestres', 'eps', 'equation', 'equilibre', 'equip', 'equipe', 'equipement', 'equipements', 'equipes', 'equitable', 'equitables', 'equitation', 'erc', 'ere', 'ers', 'es', 'escalade', 'escapade', 'escient', 'esclaves', 'escomptant', 'esp', 'espace', 'espaces', 'esperant', 'espere', 'esperer', 'esperons', 'espoir', 'esprit', 'espritbleu', 'ess', 'essaie', 'essentiel', 'essonne', 'essorer', 'est', 'estanguet', 'esthetique', 'estimation', 'estime', 'estimes', 'et', 'eta', 'etabliss', 'etai', 'etaient', 'etais', 'etait', 'etape', 'etat', 
'etats', 'etatsdamour', 'etatsgenerauxpolitiquedelaville', 'etc', 'etdemocratie', 'ete', 'eteint', 'eteintes', 'eternite', 'etes', 'ethics', 'ethique', 'etienne', 'etiez', 'etions', 'etoffe', 'etonnant', 'etonnante', 'etonnes', 'etouffe', 'etr', 'etranger', 'etrangere', 'etrangers', 'etranges', 'etre', 'etude', 'etudes', 'etudia', 'etudiant', 'etudiante', 'etudiantes', 'etudiants', 'etudient', 'eu', 'eugreenweek', 'euh', 'euhhh', 'euphorique', 'euro', 'euro2016', 'euro2016final', 'euro2022', 'eurobasket2015', 'eurobasket2017', 'europa', 'europacity', 'europe', 'europe1', 'europeen', 'europeennes', 'europeens', 'euros', 'euros2016', 'eurovision', 'eux', 'ev', 'evalue', 'evaluent', 'evaluer', 'evalues', 'evasionfiscale', 'eve', 'evelyne', 'even', 'eveneme', 'evenemen', 'evenement', 'evenementiel', 'evenements', 'event', 'eventprofs', 'eventres', 'events', 'eventuel', 'eventuels', 'evident', 'eviter', 'evitera', 'evolue', 'evoluer', 'evolution', 'evoque', 'evoquee', 'evoquent', 'evoquer', 'evoquerez', 'ex', 'exactement', 'exaequo', 'examen', 'exc', 'excatement', 'excellence', 'excellent', 'excellente', 'excellents', 'exception', 'exceptionnel', 'exceptionnelle', 'exceptions', 'exces', 'excite', 'exclusion', 'exclusivite', 'exclusivites', 'excuse', 'excuses', 'executif', 'exemplaire', 'exemplaires', 'exemplarite', 'exemple', 'exemples', 'exempte', 'exhortant', 'exige', 'exigee', 'exigences', 'existant', 'existe', 'exitchemises', 'exo', 'exonerer', 'experience', 'experiences', 'experimentation', 'expert', 'expertise', 'experts', 'explique', 'expliquer', 'exploitation', 'exploites', 'exploree', 'explose', 'explosifs', 'explosion', 'expo', 'expo2025', 'exposer', 'exposition', 'expouniverselle', 'express', 'exprimait', 'exprime', 'expriment', 'expropriations', 'expulses', 'extension', 'extensions', 'extinction', 'extrait', 'extraordinaire', 'f', 'fa', 'fabrice', 'facade', 'face', 'facebook', 'faceties', 'facette', 'facil', 'facile', 'facilement', 'faciliter', 'facon', 'facons', 'facron', 'facture', 'faculte', 'faiblesse', 'faim', 'faineant', 'faineants', 'fair', 'faire', 'fais', 'faisait', 'faisant', 'fait', 'faite', 'faites', 'faits', 'falcao', 'fallait', 'falloir', 'fallu', 'famille', 'familledu', 'familleolympique', 'fan', 'fanfan', 'fans', 'fantasme', 'fantasmer', 'fantastique', 'fanzone', 'fanzonetoulouse', 'fanzonetoureiffel', 'faque', 'farmer', 'farouchement', 'fasse', 'fassent', 'fatigue', 'fatiguent', 'faudra', 'faudrait', 'faur', 'fausse', 'faut', 'faute', 'fautpasrever', 'faveur', 'faveurs', 'favier', 'favori', 'favoriser', 'fc', 'fcais', 'fdh2017', 'fdj', 'fe', 'fecavolley', 'federateur', 'federation', 'federations', 'federatrice', 'federe', 'federer', 'feedback', 'felicitation', 'felicitations', 'felicite', 'feliciter', 'felicitions', 'feminin', 'feminine', 'feminins', 'feminisation', 'femme', 'femmepouvoitetdentelles', 'femmes', 'fenetres', 'fens2016', 'fera', 'feraient', 'ferais', 'ferait', 'feram', 'ferme', 'fermee', 'fermer', 'fermera', 'fermerture', 'fermeture', 'ferons', 'feront', 'ferree', 'ferveur', 'festive', 'fete', 'fetedelhuma', 'fetenationale', 'feter', 'fetes', 'fetons', 'feu', 'feuille', 'fevrier', 'ff', 'ffa', 'ffn', 'ffr', 'ffrollerskatebord', 'ffrs', 'fft', 'fh', 'fh2017', 'fi', 'fiasco', 'fibre', 'ficelees', 'fichent', 'fichu', 'fictif', 'fiction', 'fier', 'fiere', 'fiers', 'fiersdetrebleus', 'fierte', 'fifawwc2019', 'figaro', 'fightspirit', 'figuerer', 'figure', 'figurer', 'figures', 'filant', 'file', 'files', 'fillage', 'filles', 'film', 'filmer', 'films', 'fils', 
'filtre', 'fin', 'final', 'finale', 'finalement', 'finales', 'finance', 'financement', 'financer', 'financera', 'finances', 'financier', 'financiere', 'financieres', 'financiers', 'fini', 'finie', 'finir', 'finira', 'finissait', 'finissent', 'fiori', 'fipp', 'firminy', 'fiscal', 'fiscalite', 'fixe', 'fixer', 'flambeau', 'flamber', 'flamby', 'flamme', 'flasher', 'flatter', 'flemmards', 'flessel', 'fleuret', 'flinguer', 'floorball', 'flop', 'flory', 'flou', 'floute', 'fluidifier', 'flux', 'fn', 'foiredecaen', 'fois', 'folie', 'fonciere', 'fonction', 'fonctionnaire', 'fonctionnaires', 'fonctionnels', 'fond', 'fondamental', 'fondamentaux', 'fondateur', 'fondation', 'fonds', 'font', 'fontainebleau', 'foodtech', 'foot', 'football', 'footballeur', 'for', 'force', 'forcement', 'forcer', 'forces', 'foret', 'forgesleseaux', 'form', 'forma', 'format', 'formation', 'formations', 'forme', 'former', 'formidabl', 'formidable', 'formidablement', 'formulaire', 'formule', 'fort', 'forte', 'forts', 'forum', 'fossoyeur', 'fou', 'fouet', 'fourbe', 'fourberie', 'fourcade', 'fourmille', 'fourmis', 'fourni', 'fournisseur', 'fourniture', 'fours', 'fout', 'foutais', 'foutaise', 'foutre', 'foutu', 'foyer', 'foyers', 'fp', 'fr', 'fra', 'fraall', 'fracasse', 'fracture', 'fragile', 'fraioli', 'frais', 'fran', 'franc', 'francais', 'francaise', 'francaises', 'franchement', 'franchir', 'franchise', 'franchissement', 'francilen', 'francilien', 'franciliennes', 'franciliens', 'francis', 'franco', 'francois', 'francoishollande', 'franconville', 'francophonie', 'francs', 'frapor', 'frarou', 'fraternite', 'frederic', 'free', 'freecalling', 'french', 'frenchbashing', 'frenchtech', 'frenchturism', 'frequence', 'frequent', 'frequentati', 'fric', 'frissons', 'fromage', 'fronsse', 'front', 'frontdegauche', 'frontieres', 'frousse', 'frustration', 'fuel', 'fumeurs', 'fumigenes', 'fumisterie', 'fur', 'fut', 'futsal', 'futur', 'future', 'futures', 'futuriste', 'futurs', 'g', 'g5sahel', 'ga', 'gabegie', 'gabriel', 'gache', 'gacher', 'gachis', 'gag', 'gagnant', 'gagnante', 'gagne', 'gagnent', 'gagner', 'gagnera', 'gagnes', 'gagnion', 'gagnons', 'gain', 'gaitelyrique', 'gala', 'galere', 'gallica', 'gamerz', 'games', 'gant', 'gar', 'garanti', 'garantie', 'garantit', 'garde', 'garder', 'gare', 'gargarise', 'garros', 'gars', 'gaspillage', 'gaspiller', 'gauche', 'gaudin', 'gaule', 'gaulle', 'gauthier', 'gaver', 'gayant', 'gaygames2018', 'gazon', 'gdn', 'gds', 'geant', 'geante', 'gemme', 'general', 'generale', 'generales', 'generant', 'generation', 'generations', 'generes', 'genial', 'genie', 'genovesi', 'genre', 'gens', 'gentil', 'gentiment', 'gentrification', 'geographie', 'geometres', 'georges', 'gerald', 'gerard', 'gerbant', 'gerber', 'gere', 'geree', 'gerer', 'gerke', 'germain', 'germaine', 'geste', 'gestes', 'gesti', 'gestion', 'gestionnaires', 'ggrmc', 'ghost', 'gims', 'gip', 'gironde', 'girouette', 'git', 'givelet', 'glamour', 'global', 'globalement', 'globalite', 'go', 'goes', 'goldenblocks', 'golf', 'gonfle', 'gonfler', 'google', 'googlealerts', 'gosse', 'gou', 'gouffre', 'gout', 'gouts', 'gouv', 'gouvernance', 'gouvernants', 'gouvernement', 'gouvt', 'gpe', 'gr', 'gra', 'graaande', 'graal', 'grace', 'graet', 'graines', 'gran', 'grand', 'granddebat', 'grande', 'grandement', 'grandes', 'grandir', 'grandpalais', 'grands', 'graphicdesign', 'graphistes', 'gratter', 'gratuit', 'gratuite', 'grave', 'grco', 'grds', 'grece', 'grecs', 'green', 'greve', 'greve22mars', 'grevedeseboueurs', 'greves', 'grevesncf', 'grille', 'griller', 'grince', 
'grincheux', 'gris', 'grisaille', 'grognet', 'gros', 'grosse', 'grosses', 'grotesque', 'group', 'groupe', 'groupement', 'groupes', 'gt', 'guadeloupeenne', 'guepe', 'guerilla', 'guerillas', 'guerre', 'guetta', 'gueule', 'gueuler', 'gueules', 'guide', 'guignol', 'guignols', 'guillaume', 'gustave', 'guyancourt', 'guyforget', 'gvt', 'gymnase', 'gymnases', 'gymnastics', 'gymnastique', 'h', 'habi', 'habitan', 'habitant', 'habitants', 'habitat', 'habiter', 'habitude', 'hackathon', 'hackaton', 'hacking', 'haddad', 'haies', 'halle', 'halles', 'hallu', 'hallyday', 'hambourg', 'hamouri', 'hand', 'hand2017', 'handape', 'handball', 'handi', 'handicap', 'handicapes', 'handis', 'harding', 'harmonie', 'hashtag', 'hat', 'hate', 'hausse', 'haut', 'haute', 'hautes', 'hautesaone', 'hauteur', 'hauteurs', 'hauts', 'hautsdeseine', 'hc', 'hd', 'hdpros', 'hdr', 'he', 'hebdo', 'hebergement', 'hectares', 'hein', 'helenefr', 'hemiplegique', 'henard', 'henri', 'here', 'heritage', 'heriton', 'hermitage', 'hermitageplaza', 'heroesfr', 'heros', 'herve', 'hesite', 'hesites', 'hesitez', 'heu', 'heure', 'heures', 'heureuse', 'heureusement', 'heureux', 'hevc', 'hey', 'hg', 'hi', 'hidalgo', 'hidalgoa', 'hier', 'hierarchie', 'his', 'histoire', 'historique', 'historiques', 'hiver', 'hlm', 'ho', 'hoc', 'hockey', 'hollande', 'hommage', 'homme', 'hommes', 'homologue', 'homologuees', 'homophobe', 'hong', 'honnete', 'honnetement', 'honneur', 'honore', 'honte', 'honteuse', 'honteusement', 'hooligans', 'hop', 'hopital', 'hopitaux', 'horaire', 'horaires', 'horizon', 'horizons', 'horribilis', 'hors', 'hospitalite', 'hosted', 'hot', 'hote', 'hotel', 'hoteldeville', 'hotelier', 'hs', 'hslvids', 'huchon', 'hugo', 'hui', 'huit', 'huitahuit', 'hulot', 'humain', 'humeur', 'hurtis', 'hymne', 'hypocrisie', 'hypothetique', 'hypothetiques', 'hypoxiques', 'hysterique', 'i', 'iannetta', 'icanmakeitalone', 'icdnews', 'ice', 'ici', 'icss', 'ideale', 'idee', 'idees', 'identification', 'identifier', 'identifies', 'ideologie', 'idf', 'ien', 'ienne', 'iennes', 'iens', 'if', 'ifs', 'ifsderriere', 'igen', 'iger', 'igf', 'ignominie', 'ii', 'ijspf', 'il', 'ile', 'ilede', 'illegalement', 'illico', 'illustrati', 'illustre', 'ilms', 'ilove', 'ils', 'image', 'images', 'imagine', 'imaginer', 'imaginez', 'immateriel', 'immense', 'immercurien', 'immercuriens', 'immersion', 'immo', 'immobilier', 'immobiliere', 'immobiliers', 'immondices', 'immoraux', 'immoweek', 'impa', 'impact', 'impact2', 'impacter', 'impacts', 'impasses', 'impatience', 'impatient', 'implantation', 'implication', 'impliquee', 'impliquent', 'impliques', 'import', 'importance', 'important', 'importante', 'importantes', 'importants', 'importe', 'importer', 'impose', 'imposer', 'impossible', 'impots', 'impraticable', 'impregner', 'imprevisibles', 'imprime', 'improbable', 'improvisent', 'in', 'inaugurait', 'inauguration', 'inaugure', 'inbound', 'incapable', 'incapables', 'incapacite', 'incarne', 'incertitudes', 'incidents', 'incitation', 'incivilites', 'incluant', 'inclusif', 'inclusifs', 'inclusion', 'inclusive', 'incompetence', 'inconnu', 'incontournable', 'incontrolee', 'incroyable', 'incroyables', 'incubateur', 'ind', 'inde', 'indecence', 'indecent', 'independant', 'independante', 'indice', 'indien', 'indigne', 'indiquee', 'indispensable', 'industrie', 'industrielle', 'inedit', 'inedite', 'inegalites', 'influence', 'influentes', 'info', 'infographie', 'infojeunesse', 'information', 'informations', 'informe', 'informer', 'infos', 'infoz', 'infra', 'infractructures', 'infrastr', 'infrastructure', 
'infrastructures', 'ingenierie', 'initial', 'initiative', 'initie', 'inities', 'injustifie', 'innovant', 'innovante', 'innovantes', 'innovants', 'innovateur', 'innovateurs', 'innovation', 'innovations', 'innover', 'innovons', 'inondable', 'inoubliabl', 'inquiet', 'inquietant', 'inquiete', 'inquietent', 'inquieter', 'inquiets', 'inquietude', 'inquietudes', 'inra', 'inscri', 'inscription', 'inscrire', 'insecurite', 'insep', 'insepiens', 'inserer', 'insincere', 'insiste', 'insolites', 'insoumise', 'inspecteurs', 'inspecti', 'inspection', 'inspections', 'inspirant', 'inspirants', 'inspire', 'inspirent', 'inspirer', 'install', 'installation', 'installations', 'instant', 'instantane', 'instantanee', 'instants', 'institut', 'institution', 'institutionnels', 'insupportable', 'integrale', 'integration', 'integrer', 'intel', 'intelligent', 'intelligente', 'intelligentes', 'intensifie', 'interactive', 'intercommunal', 'interconnexions', 'interdi', 'interdictions', 'interdir', 'interdisciplinaire', 'interdit', 'interessant', 'interessante', 'interesses', 'interet', 'interets', 'interieur', 'intermediaire', 'interministeriel', 'intern', 'internatio', 'international', 'internationale', 'internationales', 'internationaux', 'internet', 'interpelles', 'interpretee', 'interressant', 'interrogeant', 'intervalle', 'intervenants', 'intervenir', 'intervention', 'intervenue', 'interview', 'intrinseque', 'intronise', 'intuitif', 'intuition', 'inutile', 'inutilement', 'inutiles', 'invente', 'inventons', 'invest', 'investi', 'investir', 'investis', 'investisse', 'investissement', 'investissements', 'investissent', 'investisseurs', 'investit', 'invitation', 'invite', 'invitee', 'inviter', 'invites', 'inzinzaclochrist', 'ioc', 'iocgohome', 'ira', 'iref', 'irfo', 'irma', 'irmes', 'ironiquement', 'iront', 'irrealistes', 'irreproch', 'irresponsables', 'is', 'isback', 'isere', 'isfwsc2018', 'isignyomaha', 'iskenderov', 'islam', 'islamistes', 'islife', 'iso20121', 'isoles', 'it', 'itw', 'ive', 'ives', 'ivres', 'ivry', 'j', 'j01', 'j1', 'ja', 'jaccueillais', 'jack', 'jackralite', 'jai', 'jaime', 'jamais', 'jametal', 'janvier', 'japon', 'japonais', 'jappert', 'jardin', 'jaubert', 'jaune', 'jaures', 'javais', 'jb', 'jdcjdr', 'jdd', 'je', 'jean', 'jeancome', 'jeanne', 'jedemande', 'jeff', 'jen', 'jennifer', 'jentends', 'jep', 'jep2017', 'jeremiasz', 'jeremy', 'jespere', 'jesuisgrincheux', 'jetais', 'jettent', 'jeu', 'jeudi', 'jeune', 'jeunes', 'jeunesse', 'jeux', 'jeux2024', 'jeuxolympique', 'jeuxparalympiques', 'jeveux2015', 'jeveux2025', 'jij2016', 'jjoo', 'jme2016', 'jnss2016', 'jo', 'jo2014', 'jo2016', 'jo2018', 'jo2032', 'joathome', 'joclub', 'jodd', 'joff', 'johnny', 'joie', 'jolatres', 'joli', 'jolie', 'jop', 'jop2016', 'jop2024', 'jopara2024', 'jotokyo2020', 'joue', 'jouent', 'jouer', 'jouera', 'joueur', 'joueurs', 'jour', 'jourdechance', 'journal', 'journaliste', 'journalistes', 'journee', 'journeedesdroitsdesfemmes', 'journeemondialedelenvironnement', 'journeeolympique', 'journees', 'journeesdupatrimoine', 'jours', 'joyeuse', 'jp', 'jparalympiques', 'jr', 'jsp', 'jt', 'jt20h', 'judiciaire', 'judicieuse', 'judicieux', 'judo', 'judokas', 'juge', 'jugeraient', 'jugeront', 'juges', 'jugez', 'juillet', 'juin', 'jul', 'julian', 'julien', 'jumelles', 'juncker', 'jungle', 'jupiter', 'jure', 'juriscup', 'jury', 'jusqu', 'jusquau', 'jusquen', 'juste', 'justement', 'justice', 'justifications', 'justifier', 'juteuse', 'juventusrealmadrid', 'jy', 'k', 'kanam', 'kanner', 'kayak', 'kelb', 'kevinfrezhx3', 'kfr', 'kids', 'kilian', 
'kilometres', 'kinshasa', 'kite', 'kitesurf', 'km', 'kohlanta', 'komox', 'kong', 'koolantha', 'koons', 'kua', 'kuros', 'l', 'l1', 'l2', 'la', 'la2024', 'la2028', 'labonneexcuse', 'laboratoire', 'laborde', 'labs', 'labsence', 'lacademie', 'lacanau', 'lacause', 'laccent', 'lacces', 'laccessibilite', 'laccompagnement', 'laccueil', 'lacets', 'lachaise', 'lache', 'lacher', 'lacombe', 'lacourneuve', 'lacqutho', 'lacroissance', 'lacroix', 'ladepeche', 'ladoumegue', 'laeroport', 'laeticia', 'laffaire', 'lagardere', 'lagence', 'laicite', 'laid', 'laide', 'lair', 'laiss', 'laisse', 'laisser', 'laissez', 'lalyceenne', 'lambassade', 'lambert', 'lambert75', 'lambition', 'lamelioration', 'lamentable', 'lamottebeuvron', 'lamottebeuvroncitedescavaliers', 'lamour', 'lampe', 'lan', 'lanc', 'lance', 'lancee', 'lancement', 'lancent', 'lancer', 'lancons', 'landais', 'landy', 'langage', 'language', 'langue', 'lanina', 'lannee', 'lannonce', 'lanternes', 'lapasset', 'laporte', 'lappel', 'lapres', 'laquelle', 'larem', 'larena', 'large', 'largement', 'largent', 'largue', 'larmes', 'larrivee', 'larrogante', 'lartiste', 'latelier', 'latin', 'lattribution', 'laube', 'laudiovisuel', 'launch', 'laura', 'laurence', 'lausanne', 'lautre', 'lavagueott', 'lavance', 'lavaur', 'lavis', 'laxe', 'lci', 'le', 'le93', 'lea', 'leader', 'lebanon', 'leblog', 'lebourget', 'lecole', 'lecomte', 'leconomie', 'lecons', 'lecture', 'lecuivre', 'led', 'leducation', 'lefigaro', 'lefilm', 'legalisons', 'legalite', 'legende', 'legerement', 'legiondhonneur', 'legislatif', 'legitime', 'legoff', 'legrand', 'legrandrdv', 'legrandshow', 'lejdd', 'lejsd', 'lel', 'lelaboration', 'lelan', 'lelite', 'lelysee', 'lemeilleurpatissier', 'lemonde', 'lemploi', 'lendemain', 'lenfance', 'lengagement', 'lenne', 'lensemble', 'lent', 'lenthousiasme', 'lentra', 'lentrainement', 'lenv', 'leognan', 'lepetit', 'lepoint', 'lepopulaire', 'lequipe', 'lerequisitoire', 'leroy', 'les', 'lescalade', 'lescrime', 'lescrimeuse', 'lesechos', 'lesmemesquitrinquent', 'lesmureaux', 'lesprit', 'lesrepublicains', 'letalk', 'letapedutour', 'letarnec', 'letat', 'letre', 'letsgo', 'lettre', 'letude', 'leuphorie', 'leur', 'leuro', 'leurope', 'leurs', 'levallois', 'levenement', 'levenementasso', 'levenementiel', 'levier', 'levolution', 'levothyrox', 'lexigence', 'lexperien', 'lexperience', 'leymarie', 'lfi', 'lheritage', 'lhistoire', 'lhonneur', 'lhorizon', 'lhospitalier', 'lhotel', 'lhypothetique', 'li', 'liaison180', 'liam', 'liampayne', 'liberaliser', 'liberalisme', 'liberation', 'liberes', 'liberte', 'libre', 'librement', 'lice', 'licence', 'lidee', 'liee', 'liees', 'lien', 'liera', 'lies', 'liesse', 'liesses', 'lieu', 'lieux', 'lifting', 'ligf', 'lights', 'ligjs', 'ligne', 'ligne14', 'ligne15', 'ligne18', 'ligne718', 'ligne8', 'ligner', 'lignes', 'ligue', 'ligue1', 'ligue1conforama', 'ligues', 'like', 'lile', 'lille', 'lima', 'limiter', 'limmobilier', 'limoges', 'limpact', 'linclusion', 'lindustrie', 'linevitable', 'linguistiques', 'linnovation', 'linspection', 'linstant', 'linteret', 'linterview', 'linvitation', 'linvite', 'liquidateur', 'lire', 'lis', 'lisez', 'lit', 'lits', 'liv', 'live', 'livingiswinning', 'livraison', 'livre', 'livrer', 'liza', 'lmp', 'lo', 'lobbies', 'lobbying', 'lobbyiste', 'lobbyistes', 'lobtention', 'local', 'locales', 'locaux', 'loccasion', 'loffre', 'logement', 'logements', 'loger', 'logic', 'logiq', 'logique', 'logo', 'logos', 'loi', 'loin', 'loire', 'loiretcher', 'lois', 'loisirs', 'loitravail', 'loitravailxxl', 'lol', 'lola', 'lolympiade', 'lolympisme', 
'lon', 'londres', 'londres2012', 'long', 'longtemps', 'longue', 'longues', 'look', 'lookea', 'loooooose', 'lopez', 'lopportunite', 'lorant', 'lordre', 'lorganisation', 'lors', 'lorsqu', 'lorsque', 'los', 'losangeles2024', 'louane', 'loupbureau', 'loupe', 'louradour', 'lourd', 'lourdement', 'lourds', 'louve', 'louvrage', 'louvre', 'love', 'low', 'lozere', 'lp', 'lr', 'lrem', 'ls', 'lsc', 'lt', 'ltd', 'lu', 'luc', 'lucas', 'lucide', 'lucie', 'lufuanitu', 'lui', 'lumiere', 'lumieres', 'lund', 'lundi', 'lune', 'lurbanisme', 'lutte', 'lutter', 'luxe', 'lvmh', 'lvq', 'lycee', 'lyceenne', 'lyceennes', 'lyceens', 'lycees', 'lyon', 'lyonnaise', 'm', 'm5s', 'ma', 'machine', 'macron', 'madame', 'madoff', 'madrid', 'magasin', 'magazine', 'magifique', 'magique', 'magiques', 'magnifique', 'magouill', 'mahe', 'mahiedine', 'mai', 'maif', 'maifrun', 'maigrot', 'mail', 'maillot', 'main', 'maintenance', 'maintenant', 'maintenantonagit', 'maintentant', 'maintenu', 'maintenue', 'maintient', 'maire', 'maires', 'mairie', 'mairiede', 'mais', 'maison', 'maisons', 'maitre', 'maitresse', 'maitrise', 'maitrisee', 'maitriser', 'maitrises', 'majeur', 'majeures', 'majeurs', 'makunda', 'mal', 'malades', 'malgre', 'malheureusement', 'malheureux', 'malmont', 'mamainacouper', 'management', 'mandataire', 'mandates', 'mandature', 'mang', 'manger', 'mangeurs', 'maniere', 'manif', 'manif21septembre', 'manifes', 'manifestations', 'manifestement', 'manifestent', 'manifester', 'manifs', 'manilles', 'manne', 'mannes', 'manoir', 'manq', 'manque', 'manquer', 'manuvre', 'maracana', 'marathon', 'marc', 'march', 'marchalnguyen', 'marchand', 'marchandises', 'marche', 'marcher', 'marches', 'marchespublics', 'mardi', 'marge', 'maria', 'marianne', 'maribor', 'marie', 'mariejoforever', 'marin', 'marinakaye', 'marine', 'marins', 'maritime', 'mark', 'market', 'marketi', 'marketin', 'marketing', 'markterr', 'marnais', 'marne', 'marque', 'marquent', 'marquer', 'marques', 'marqueurs', 'marquise', 'marraine', 'marrant', 'marre', 'marrer', 'mars', 'marseillaise', 'marseille', 'marseille2024', 'marteau', 'martel', 'martiens', 'martin', 'martine', 'martinez', 'martinique', 'marville', 'marxdormoy', 'mascarade', 'mascaret', 'mascotte', 'mascottes', 'masculin', 'masgrau', 'massacre', 'massacres', 'masse', 'masseglia', 'masses', 'massy', 'master', 'mat', 'match', 'matchs', 'mate', 'matelas', 'mater', 'materiel', 'matheson', 'mathilde', 'mathoux', 'matiere', 'matin', 'matinal', 'matinee', 'matins', 'matth', 'mau', 'mauvais', 'mauvaise', 'max', 'maxenc', 'maximiser', 'maximum', 'maximus', 'maybe', 'mayenne', 'mazargues', 'mbappe', 'mc', 'mdp', 'mdr', 'mdrrr', 'mds', 'me', 'meanwhilein', 'meaux', 'mec', 'mecanicien', 'mecanisme', 'mecanismes', 'meck', 'mecs', 'med', 'medail', 'medaille', 'medaillee', 'medailles', 'medef', 'media', 'medias', 'mediatique', 'mediatisation', 'mediocrite', 'mediter', 'mediterraneen', 'medtaha', 'meet', 'meeting', 'meetup', 'mega', 'mehb', 'meil', 'meilleur', 'meilleure', 'meilleurs', 'mekhissi', 'melanie', 'melee', 'melenchon', 'melun', 'membre', 'membres', 'meme', 'memes', 'memoire', 'memorable', 'menace', 'menaces', 'menage', 'mene', 'menee', 'menees', 'meneur', 'mensonges', 'mensuelle', 'mensuels', 'ment', 'mental', 'mentale', 'mentalites', 'mentent', 'menti', 'mention', 'mentionnees', 'ments', 'menu', 'menuet', 'menvole', 'mepris', 'mepriser', 'mer', 'mercato', 'merci', 'mercredi', 'merde', 'merdier', 'merite', 'meritent', 'merites', 'merkel', 'merveilleux', 'mes', 'message', 'messages', 'mesure', 'mesurer', 'mesures', 
'met', 'metamorphose', 'methode', 'metiers', 'metres', 'metro', 'metronews', 'metropole', 'metropoles', 'metropolitaine', 'metros', 'mettent', 'mettez', 'mettra', 'mettre', 'mettront', 'meufs', 'mev2016', 'mg', 'mi', 'michel', 'midi', 'mieux', 'mignon', 'mignonne', 'migrants', 'milieu', 'militaire', 'militons', 'millenials', 'milli', 'millian', 'milliar', 'milliard', 'milliards', 'million', 'millionaire', 'millions', 'min', 'mince', 'mine', 'mineurs', 'minimum', 'ministere', 'ministerielles', 'ministre', 'ministres', 'minori', 'minorite', 'minute', 'minutes', 'minutieusement', 'mion', 'mipim18', 'mipim2018', 'mis', 'mise', 'miss', 'mission', 'missionne', 'missionnes', 'missions', 'mit', 'mitrailles', 'mix', 'mixite', 'mixte', 'mixtes', 'mkn', 'mld', 'mlp', 'mm', 'mme', 'mn', 'mobilisa', 'mobilisation', 'mobilisatrice', 'mobilise', 'mobilisee', 'mobilisees', 'mobilisent', 'mobiliser', 'mobilises', 'mobilisez', 'mobilisons', 'mobilite', 'mobility', 'mode', 'modele', 'moderne', 'modernisee', 'moderniser', 'modernite', 'modification', 'modulaire', 'moi', 'moidevant', 'moindre', 'moins', 'moipresident', 'moirans', 'mois', 'moitie', 'moliere', 'mollky', 'mom', 'moment', 'moments', 'momie', 'mon', 'monaco', 'monde', 'mondial', 'mondial2023', 'mondiale', 'mondialisation', 'mondialise', 'mondiaux', 'monet', 'monfort', 'monnaie', 'monopoliser', 'monsieur', 'mont', 'montaigne', 'montant', 'montargis', 'montdemarsan', 'monte', 'monter', 'montmagny', 'montparnasse', 'montpellier', 'montre', 'montrer', 'montrons', 'monty', 'monvote', 'moquee', 'moral', 'morandini', 'morano', 'more', 'more2024', 'morin', 'morning', 'mort', 'mos', 'mot', 'moteur', 'motivation', 'motive', 'mots', 'mouais', 'mouille', 'mourir', 'mousquetaires', 'mousser', 'mouvem', 'mouvemen', 'mouvement', 'mouvements', 'moyen', 'moyenne', 'moyens', 'mp', 'mr', 'ms', 'mtn', 'much', 'muer', 'muhammad', 'mulhouse', 'multi', 'multicolores', 'multimodales', 'multinationales', 'multiplier', 'mun', 'munich', 'municipal', 'municipales', 'municipaux', 'munir', 'mur', 'muriel', 'murs', 'musclee', 'museification', 'musique', 'mustread', 'mutation', 'mutualiser', 'mvt', 'myeur', 'mylene', 'mystere', 'n', 'n15', 'n2', 'na', 'nabot', 'nadot', 'nage', 'nager', 'nageront', 'nageurs', 'nageuse', 'nains', 'najdovski', 'nan', 'nanjedeconne', 'nantes', 'nantis', 'nappes', 'nat', 'natati', 'natation', 'nathalie', 'nathalielevync', 'nation', 'national', 'nationale', 'nationales', 'nationalites', 'nationaux', 'nature', 'naturel', 'nautique', 'nautiques', 'nautisme', 'naves', 'navigo', 'nba', 'nddl', 'ne', 'necessaire', 'necessaires', 'necessite', 'nefastes', 'negatif', 'negativement', 'nego', 'negociation', 'negociations', 'negocie', 'neige', 'neoliberal', 'nerf', 'nes', 'nest', 'net', 'nettoie', 'nettoyage', 'nettoyer', 'neuf', 'nevergiveup', 'new', 'news', 'newsletter', 'newvideo', 'newyork', 'nexclut', 'neymar', 'nez', 'ngaleku', 'ni', 'nice', 'niceattentat', 'nicol', 'nicolas', 'nicolassarkozy', 'nids', 'nigg', 'nike', 'nilor', 'nique', 'niveau', 'niveaux', 'no', 'noir', 'nojo', 'nolwenn', 'nolwennleroy', 'nom', 'nombre', 'nombreuses', 'nombreux', 'nomination', 'nomis', 'nomme', 'non', 'nona', 'nonaux', 'nonauxjo', 'nony', 'noooooon', 'nopainnogain', 'nord', 'normal', 'normale', 'norme', 'normes', 'norvege', 'nos', 'not', 'notamment', 'note', 'notes', 'notre', 'notres', 'nou', 'noues', 'nourrir', 'nous', 'nouveau', 'nouveaux', 'nouvel', 'nouvelle', 'nouvelleaquitaine', 'nouvelleepreuve', 'nouvelles', 'now', 'ns', 'nt', 'nuance', 'nudge', 'nues', 'nuisible', 
'nuit', 'nuitdurugby', 'nulle', 'numerik', 'numerique', 'numeriques', 'numero', 'nvl', 'ny', 'o', 'objectif', 'objectifs', 'obligatoire', 'observer', 'obstination', 'obtenir', 'obtention', 'obtenu', 'obtienne', 'obtiens', 'obtient', 'occasion', 'occasions', 'occidentales', 'occitanie', 'occulte', 'occulter', 'occupe', 'occuper', 'occupera', 'octobre', 'odd', 'oeufs', 'oeuvre', 'of', 'off', 'offensive', 'offert', 'offi', 'office', 'official', 'officialise', 'officiel', 'officielle', 'officiellement', 'officielles', 'officiels', 'offre', 'offres', 'offrir', 'offrons', 'offshore', 'ogre', 'oh', 'oise', 'oisienne', 'ok', 'ol', 'olivier', 'ollier', 'oly', 'olym', 'olympiades', 'olympic', 'olympicgames', 'olympico', 'olympics', 'olympics2016', 'olympiens', 'olympiq', 'olympique', 'olympiquede', 'olympiques', 'olympisme', 'om', 'ombre', 'omnibus', 'ompsg', 'on', 'oncroiselesdoigts', 'onekites', 'onesta', 'ongles', 'onm', 'onmyway', 'ont', 'onu', 'onvarigoler', 'op', 'open', 'opendata', 'openingceremony', 'openinno', 'openinnovation', 'opera', 'operation', 'operationmontblanc', 'operationnel', 'operationnelle', 'operationnelles', 'operations', 'opere', 'operer', 'oportunite', 'opportunite', 'opportunites', 'opposants', 'optimiser', 'optimisme', 'optimiste', 'optique', 'or', 'orange', 'orbite', 'ordinaire', 'ordonnance', 'ordre', 'ordures', 'ores', 'orga', 'organ', 'organisait', 'organisat', 'organisateur', 'organisateurs', 'organisation', 'organisations', 'organisatrices', 'organise', 'organisee', 'organisent', 'organiser', 'organisera', 'organiseront', 'organises', 'orient', 'orientation', 'orientations', 'orig', 'originalite', 'origine', 'orlando', 'orlinsk', 'orphelinat', 'ortf', 'orties', 'ortolans', 'oscar', 'oscille', 'ose', 'oseille', 'otage', 'ou', 'ouai', 'ouaiiiii', 'ouaiiiiii', 'oubliais', 'oublie', 'oublier', 'oublions', 'ouest', 'oui', 'oury', 'ous', 'out', 'outil', 'outils', 'outre', 'outremer', 'ouver', 'ouvert', 'ouverte', 'ouverts', 'ouverture', 'ouvrage', 'ouvrant', 'ouvre', 'ouvriers', 'ouvrira', 'overboard', 'overcool', 'overdose', 'overdosedeviolence', 'ovg', 'ovpl', 'oxford', 'p', 'pa', 'pabo', 'package', 'pactole', 'page', 'pagny', 'paie', 'paieront', 'paille', 'pain', 'paix', 'pajol', 'pakman', 'palais', 'palet', 'palier', 'pallez', 'pam', 'paname', 'pandathlon', 'panel', 'panem', 'panique', 'panne', 'pannes', 'pape', 'papefrancois', 'papiers', 'paques', 'par', 'para', 'paradis', 'paradoxe', 'parait', 'paraitre', 'paralymp', 'paralympics', 'paralympique', 'paralympiques', 'parc', 'parcdesprinces', 'parce', 'parcourir', 'parcours', 'parcourus', 'pardon', 'pareil', 'parents', 'parfait', 'parfaite', 'parfaitement', 'parfois', 'parfum', 'pari', 'paribas', 'parioca', 'paritaire', 'parite', 'parking', 'parlaient', 'parle', 'parler', 'parlerons', 'parlez', 'parliez', 'parlons', 'parmentier', 'parmi', 'parois', 'parole', 'paroles', 'parrain', 'part', 'partage', 'partagent', 'partageons', 'partager', 'partagez', 'partait', 'partenaire', 'partenaires', 'partenar', 'partenariale', 'partenariat', 'partenariats', 'parti', 'participait', 'participatif', 'participation', 'participative', 'participe', 'participent', 'participer', 'participez', 'participons', 'particuliere', 'particulierement', 'partie', 'partir', 'partis', 'partout', 'party', 'paru', 'parvis', 'pas', 'pascal', 'pasivite', 'pass', 'passage', 'passait', 'passant', 'passants', 'passe', 'passent', 'passer', 'passerelle', 'passes', 'passion', 'passionnant', 'passionnante', 'pat', 'pates', 'patienter', 'patineuse', 'patisserie', 
'patrick', 'patrickfiori', 'patrimoine', 'patron', 'patronage', 'pau', 'paul', 'paule', 'paulo', 'pause', 'pauvres', 'pauvrete', 'pavade', 'paye', 'payer', 'payera', 'payne', 'payons', 'pays', 'paysdelaloire', 'paysdeloire', 'pb', 'pb18', 'pbs', 'pc', 'pcf', 'pcq', 'pcqon', 'pdf', 'pdt', 'peau', 'peche', 'pecresse', 'pedagogique', 'peine', 'peinturebiodegradable', 'pekin', 'pekin2022', 'pen', 'penalisent', 'penche', 'pendant', 'pendent', 'pensaient', 'pensance', 'pensant', 'pense', 'pensee', 'pensent', 'penser', 'pensez', 'pensons', 'penultinien', 'percent', 'percevoir', 'percevra', 'perche', 'perches', 'perdent', 'perdre', 'perdu', 'perdus', 'pere', 'perec', 'perenniser', 'perennite', 'perf', 'performance', 'performance2024', 'performances', 'periode', 'peripherie', 'peripherique', 'perl', 'perm', 'permanent', 'permanente', 'permanentes', 'permet', 'permettre', 'perso', 'personnalite', 'personnalites', 'personne', 'personnel', 'personnellement', 'personnels', 'personnes', 'perspective', 'perspectives', 'persuade', 'persuadent', 'perte', 'pertes', 'pertinent', 'perturbent', 'peser', 'pessimiste', 'petanque', 'peter', 'petit', 'petite', 'petites', 'petiti', 'petitio', 'petition', 'petits', 'peu', 'peupl', 'peuple', 'peur', 'peut', 'peuvent', 'peux', 'peyongchang2018', 'pharaoniques', 'phares', 'phase', 'phenomenale', 'philippe', 'philosophie', 'philou', 'phiphou', 'phot', 'photo', 'photos', 'phreatiques', 'physique', 'pianos', 'pib', 'pics', 'pieces', 'pied', 'pieds', 'piegeac', 'pierre', 'pile', 'pilier', 'pilo', 'pilote', 'pilotee', 'piloter', 'pilule', 'pin', 'pinault', 'pingpongfrancais', 'pinocchio', 'pioche', 'piqure', 'pire', 'pis', 'piscin', 'piscine', 'piscines', 'piscinesolympiques', 'pist88', 'piste', 'pistes', 'piteuse', 'pitie', 'pitre', 'pjl', 'pjlolympique', 'pl', 'place', 'placee', 'placer', 'placera', 'places', 'plafond', 'plage', 'plaidant', 'plaide', 'plaine', 'plainecommune', 'plaisir', 'plaisirs', 'plait', 'plaiz', 'plan', 'planche', 'plane', 'planet', 'planetaire', 'planete', 'plans', 'planter', 'plantes', 'plantu', 'planvoile', 'plaquetournante', 'plastif', 'plat', 'plateau', 'plateforme', 'playback', 'playithuman', 'plaza', 'ple', 'plebiscite', 'pleiade', 'plein', 'pleine', 'pleines', 'pleins', 'pleniere', 'pleu', 'pleyel', 'plie', 'plier', 'plombee', 'plombees', 'plonger', 'plongeur', 'plouay', 'plsu', 'plui', 'pluie', 'plupart', 'pluri', 'pluriel', 'plus', 'plusi', 'plusieurs', 'plutot', 'pme', 'pmi', 'pmp', 'pmrstreet', 'pnl', 'po', 'poaefe2018', 'poches', 'podiu', 'podium', 'pognon', 'pogrom', 'poids', 'poignee', 'poil', 'poing', 'point', 'pointe', 'pointer', 'points', 'pokora', 'pole', 'polemique', 'polemiques', 'police', 'politicien', 'politiciens', 'politiq', 'politique', 'politiques', 'polluer', 'pollution', 'polo', 'polos', 'polpor', 'polynesie', 'pomo', 'pond', 'poney', 'pongistes', 'pont', 'pontoise', 'pop', 'populai', 'populaire', 'popularite', 'populasse', 'populatio', 'population', 'populations', 'populous', 'porcheville', 'porfra', 'portables', 'portant', 'porte', 'portee', 'porter', 'portera', 'portes', 'porteur', 'porteuse', 'portolano', 'portons', 'portrait', 'portugal', 'posait', 'pose', 'posees', 'posent', 'poser', 'positif', 'position', 'positionne', 'positionnement', 'positives', 'possible', 'possibles', 'post', 'postale', 'poste', 'postes', 'postuler', 'potentiel', 'potentielles', 'potentiels', 'pots', 'pou', 'poubelles', 'pouce', 'pouffer', 'poule', 'poules', 'pour', 'pourboire', 'pourcentage', 'pourquoi', 'pourqupi', 'pourra', 'pourraient', 
'pourrait', 'pourri', 'pourrie', 'pourriez', 'pourris', 'pourriture', 'pourrons', 'pourront', 'poursuit', 'poursuite', 'poursuivre', 'pourtant', 'pourtous', 'pourvu', 'pousser', 'pouvait', 'pouvez', 'pouvoir', 'pouvoirs', 'pouvons', 'power', 'ppl', 'ppp', 'pr', 'pratiq', 'pratiquants', 'pratique', 'pratiques', 'pre', 'precede', 'precieuse', 'precis', 'precise', 'preconise', 'precurseur', 'predecesseurs', 'prefabriquee', 'prefaceur', 'prefe', 'prefere', 'prefererait', 'preferes', 'preferez', 'preliminaire', 'premier', 'premiere', 'premieres', 'premiers', 'prenant', 'prend', 'prendre', 'prends', 'prenez', 'prennent', 'prenons', 'preoccupant', 'preparatifs', 'preparation', 'preparatqu', 'prepare', 'preparent', 'preparer', 'prepares', 'preparez', 'prepatation', 'pres', 'presence', 'present', 'presentation', 'presente', 'presentent', 'presenter', 'presentera', 'presentes', 'presentiel', 'presents', 'preserver', 'presi', 'presidee', 'presidence', 'president', 'presidentdelarepublique', 'presidente', 'presidents', 'presonnes', 'presque', 'pressadom', 'presse', 'pression', 'presta', 'prestataires', 'prestation', 'prestige', 'pret', 'prete', 'pretendre', 'pretention', 'pretes', 'pretextant', 'pretexte', 'prets', 'preuve', 'prevenir', 'prevention', 'preventive', 'prevenu', 'prevenus', 'previsible', 'prevision', 'previsionnel', 'previsions', 'prevoient', 'prevoir', 'prevoit', 'prevu', 'prevue', 'prevues', 'prevus', 'pri', 'prie', 'prier', 'priere', 'primaire', 'primaire2016', 'prime', 'principal', 'principaux', 'principe', 'principederealite', 'principes', 'prions', 'priori', 'priorit', 'prioritaire', 'prioritaires', 'priorite', 'priorites', 'pris', 'prise', 'prises', 'prison', 'prithika', 'privatisation', 'privatisations', 'privatise', 'prive', 'privee', 'prives', 'privilegie', 'privilegies', 'prix', 'pro', 'probablement', 'probe', 'problematique', 'probleme', 'problemes', 'procedure', 'procedures', 'processus', 'prochain', 'prochaine', 'prochainement', 'prochaines', 'prochains', 'proche', 'proches', 'prod', 'production', 'productions', 'productivite', 'produit', 'produits', 'prof', 'profe', 'professeurs', 'professionals', 'professionnel', 'professionnelles', 'professionnels', 'profi', 'profit', 'profite', 'profiter', 'profitera', 'profiteront', 'profitons', 'profits', 'profonde', 'profondement', 'profondes', 'profonds', 'profs', 'prog', 'programmateurs', 'programmation', 'programme', 'programmee', 'progres', 'progresser', 'project', 'projecteur', 'projecteurs', 'projet', 'projets', 'projette', 'prolos', 'promesses', 'promet', 'promettre', 'promis', 'promo', 'promoteur', 'promoteurs', 'promotion', 'promotionnel', 'promouvant', 'promouvoir', 'pronojdd', 'prononcer', 'pronostique', 'propagande', 'propos', 'proposant', 'propose', 'proposecommewauquiez', 'proposer', 'proposes', 'proposez', 'proposition', 'propositions', 'proposons', 'proprement', 'propres', 'proprete', 'propri', 'prostates', 'protection', 'protegelaplanete', 'proteger', 'protocole', 'prouver', 'province', 'provinciaux', 'provisoire', 'provisoires', 'provoque', 'provoquer', 'proximite', 'prsdt', 'ps', 'psdt', 'pse', 'psg', 'psgasm', 'psgol', 'psgom', 'pste', 'psychologiquement', 'ptdrrrr', 'ptits', 'ptn', 'pts', 'pu', 'puanteur', 'pub', 'publi', 'public', 'publication', 'publici', 'publicite', 'publics', 'publier', 'publiera', 'publique', 'publiquement', 'publiques', 'pubs', 'puent', 'pugilat', 'puis', 'puiseent', 'puisqu', 'puisque', 'puisse', 'puit', 'punition', 'punk', 'pupponi', 'pur', 'purpose', 'putain', 'putains', 
'pyeonchang2018', 'pyeongchang', 'pyeongchang2018', 'pyongchang2018', 'pyongyang2018', 'python', 'q', 'qd', 'qq', 'qqls', 'qques', 'qu', 'qua', 'quadrilingue', 'qualifica', 'qualification', 'qualifiee', 'qualifient', 'qualifier', 'qualifiera', 'qualifies', 'qualifs', 'qualitative', 'qualite', 'quan', 'quand', 'quantite', 'quartier', 'quartiers', 'quartus', 'quasi', 'quatre', 'quautant', 'quavec', 'que', 'queen', 'quel', 'quelle', 'quelles', 'quelqu', 'quelque', 'quelques', 'quels', 'quentin', 'quentinois', 'quest', 'question', 'questionnaire', 'questionnee', 'questionnent', 'questions', 'quetes', 'qui', 'quid', 'quil', 'quils', 'quinoa', 'quinon', 'quiose', 'quiseront', 'quitter', 'quittons', 'quoi', 'quoiqu', 'quon', 'quora', 'quot', 'quotidien', 'quun', 'quune', 'qvt', 'r', 'r92st', 'ra', 'raaa', 'rab', 'rabais', 'rabattu', 'rabault', 'racisme', 'racontait', 'raconte', 'radio', 'rafale', 'rager', 'raggi', 'rails', 'raison', 'raisonnable', 'rajouter', 'ralentie', 'ralentit', 'raler', 'ralite', 'ramassent', 'rame', 'ramene', 'ramener', 'ramer', 'rang', 'ranimer', 'rapide', 'rappel', 'rappele', 'rappeler', 'rappelle', 'rappellent', 'rappelons', 'rapple', 'rapport', 'rapporte', 'rapporter', 'rapporteraient', 'rapporteront', 'rapportonesta', 'rapports', 'rapproche', 'rapprocher', 'rare', 'rarement', 'rassemble', 'rassemblee', 'rassemblent', 'rassembles', 'rassurant', 'rassurante', 'rassure', 'rassures', 'rassurez', 'rat', 'rate', 'ratee', 'rater', 'ratez', 'rationnel', 'ratp', 'rats', 'rattrapage', 'rattrape', 'ravi', 'ravie', 'ravis', 'ravva', 'rayonne', 'rayonnent', 'rayonner', 'raz', 'rdv', 'rdvdu', 're', 'reaction', 'reactions', 'read', 'ready', 'ready24', 'reaffirme', 'reaffirment', 'reagit', 'realisation', 'realise', 'realisee', 'realisees', 'realiste', 'realistes', 'realite', 'realitevirtuelle', 'reamenagement', 'rebelote', 'rebeu', 'rebondisses', 'reboul', 'recale', 'recap', 'recents', 'reception', 'receptions', 'recette', 'recettes', 'recevait', 'recevoir', 'recevra', 'recherche', 'reclame', 'reclamez', 'recoit', 'recomm', 'recompense', 'recompensee', 'recon', 'reconcilier', 'reconnais', 'reconnaiss', 'reconnaissance', 'reconnait', 'reconnue', 'reconstitution', 'reconvers', 'reconversion', 'reconvertie', 'record', 'recrue', 'recrute', 'recrutement', 'recteur', 'rectificatif', 'rectrice', 'recu', 'recul', 'recule', 'recup', 'recus', 'recycle', 'redaction', 'redempteur', 'redepense', 'redevient', 'rediff', 'rediffusion', 'redit', 'redoute', 'reduction', 'reduire', 'reduite', 'reel', 'reell', 'reellement', 'reels', 'reelue', 'refait', 'refassent', 'refection', 'referendum', 'reflechissez', 'reflexe', 'reflexion', 'reflexions', 'reforme', 'reformer', 'reformes', 'refrain', 'refugies', 'refus', 'refuse', 'refusent', 'refuser', 'regaler', 'regard', 'regarde', 'regardent', 'regarder', 'regarderais', 'regardez', 'regate', 'regeneration', 'region', 'regional', 'regionale', 'regionaux', 'regioncentrevaldeloire', 'regions', 'reglement', 'regler', 'regles', 'regnier', 'regrette', 'regulierement', 'rehabilite', 'reinventer', 'rejeter', 'rejoignez', 'rejoindre', 'rejoint', 'rejouir', 'rejouissent', 'rejouit', 'relais', 'relance', 'relatif', 'relatifs', 'relations', 'relative', 'relativement', 'relayeur', 'releve', 'relever', 'relevez', 'relire', 'reluisant', 'rem', 'remarquable', 'remboursez', 'remercie', 'remerciement', 'remerciements', 'remercier', 'remet', 'remettre', 'remis', 'remplaces', 'remplie', 'remplir', 'remplissage', 'remportait', 'remportant', 'remporte', 'remportent', 'remporter', 
'remportes', 'remuneration', 'remunerations', 'remy', 'renaud', 'rencontre', 'rencontrent', 'rencontrer', 'rencontres', 'rend', 'rendent', 'rendez', 'rendinger', 'rendrais', 'rendre', 'rends', 'rendu', 'rendue', 'rendus', 'renforce', 'renforcer', 'renforces', 'rennes', 'renonce', 'renoncement', 'renoncer', 'renouveau', 'renouvelables', 'renovation', 'renove', 'renovee', 'renover', 'renseignement', 'rent', 'rentable', 'rentables', 'rentre', 'rentree', 'rentrer', 'renverront', 'reparler', 'reparlera', 'reparti', 'repartir', 'repere', 'repetitions', 'replay', 'repond', 'repondait', 'repondr', 'repondre', 'repondu', 'reponse', 'reponses', 'report', 'reportage', 'reportee', 'reporter', 'repos', 'repose', 'repost', 'repoussee', 'repousser', 'representant', 'representants', 'represente', 'representee', 'representent', 'representer', 'representes', 'reproche', 'reproduisez', 'republicaine', 'republicaines', 'republique', 'rer', 'rera', 'rerb', 'rese', 'reseau', 'reseaux', 'reserve', 'reservee', 'reserves', 'reside', 'residence', 'resistance', 'resolument', 'respe', 'respect', 'respecte', 'respectee', 'respecter', 'respectezaubervilliers', 'respectivement', 'respectu', 'respectueux', 'respire', 'respirer', 'responsabilite', 'responsable', 'responsables', 'resseau', 'ressemblait', 'ressortir', 'reste', 'restent', 'rester', 'restera', 'resterons', 'restez', 'restitution', 'restons', 'restreint', 'resultat', 'resultats', 'resume', 'ret', 'retard', 'retarde', 'retards', 'retenu', 'reticente', 'retire', 'retirer', 'retombees', 'retour', 'retournement', 'retrait', 'retraite', 'retransmettre', 'retransmis', 'retransmission', 'retransmissions', 'retro', 'retrogrades', 'retrouvailles', 'retrouve', 'retrouver', 'retrouvez', 'retweete', 'retweeted', 'reuni', 'reunie', 'reunies', 'reunion', 'reunions', 'reunis', 'reunit', 'reus', 'reussi', 'reussie', 'reussir', 'reussissent', 'reussite', 'reussites', 'reuters', 'rev', 'revanche', 'reve', 'reveiller', 'revelation', 'revele', 'revelees', 'revelera', 'revendications', 'revendique', 'revenir', 'revenu', 'reveolympique', 'rever', 'revers', 'reversibilite', 'reversibles', 'revez', 'revie', 'reviendra', 'revient', 'reviser', 'revisions', 'revoir', 'revolution', 'revu', 'revue', 'revuperer', 'rfm', 'rg16', 'rh', 'rhabiller', 'rhumatologue', 'richard', 'riche', 'riches', 'richesses', 'ridicule', 'rien', 'rigole', 'rigoureusement', 'rigueur', 'rim', 'rimer', 'riner', 'rio', 'rip', 'rire', 'ris', 'risque', 'risquent', 'risques', 'rites', 'riverains', 'rives', 'rivesdeseine', 'rmc', 'rmclive', 'roadto', 'roadtorio', 'robert', 'rocard', 'roches', 'rock', 'rodez', 'roi', 'rois', 'roissy', 'roland', 'rolandgarros', 'role', 'rolland', 'rom', 'roma2024', 'rome', 'rome2024', 'romera', 'ronde', 'rooftop', 'roron', 'rou', 'rouge', 'roule', 'rouler', 'rounds', 'route', 'routes', 'rsi', 'rsu', 'rt', 'rtbf', 'rtctur', 'rtl', 'rtlmatin', 'rtlsoir', 'ru', 'rubrique', 'ruche', 'rue', 'rues', 'rugb', 'rugby', 'rugby2023', 'rugbyworldcup', 'rugbyworldcup2023', 'ruine', 'ruinent', 'ruiner', 'run', 'runners', 'ruse', 'russe', 'russie', 'rv', 'rwc', 'rwc2023', 'ryadhsallem', 'rydercup', 'rydercup2018', 'rythme', 's', 'sa', 'sabani', 'sable', 'sac', 'sachant', 'sachez', 'saclay', 'sacre', 'sacree', 'sacrifices', 'sacrifiees', 'sacrifier', 'sacrifierons', 'sagit', 'saigner', 'saint', 'saintdenis', 'saintetiennedurouvray', 'saintouen', 'saintvalentin2018', 'sais', 'saisir', 'sait', 'salai', 'salair', 'salaire', 'salaires', 'salee', 'sales', 'salle', 'salles', 'salon', 'salons', 'salue', 
'saluee', 'saluent', 'saluer', 'salut', 'salute', 'samedi', 'sanctionne', 'sandro', 'sans', 'sante', 'saouler', 'sape', 'sappuyant', 'sappuyer', 'sarajevo', 'sardines', 'sarko', 'sarkozy', 'sarranger', 'sature', 'sauf', 'saulnier', 'saura', 'sauront', 'saut', 'sauter', 'sauvegarder', 'sav', 'savait', 'save', 'savent', 'savethedate', 'savez', 'savo', 'savoir', 'scalp', 'scandales', 'scandaleux', 'scandalise', 'scapin', 'scene', 'scenes', 'sceptique', 'schema', 'school', 'sciemment', 'sciences', 'scientifique', 'scol', 'scolaire', 'scolaires', 'scooters', 'sdes', 'sdf', 'sdg', 'se', 'seance', 'seancecd93', 'seancepublique', 'sebastien', 'seconder', 'secr', 'secret', 'secretaire', 'secteur', 'secteurs', 'section', 'secu', 'securisation', 'securise', 'securiser', 'securite', 'securitesociale', 'seda', 'sedentarite', 'seduit', 'see', 'seeph2017', 'seigneurs', 'sein', 'seine', 'seinesaintdenis', 'sejo', 'selection', 'selectionnables', 'selectionne', 'selectionnee', 'selectionnes', 'selectionneur', 'selfie', 'selfierate', 'selon', 'sem', 'semaine', 'semaineindustrie', 'semainelfm', 'semaines', 'semble', 'semblent', 'semelles', 'semi', 'seminaire', 'semmerder', 'senat', 'senat360', 'senateur', 'senateurs', 'sengage', 'sengagent', 'senior', 'seniors', 'sens', 'sensation', 'sensdesmots', 'sensibilises', 'sensible', 'sent', 'sentendre', 'senti', 'sentinelle', 'sentir', 'separent', 'sept', 'septahuit', 'septembre', 'sera', 'serai', 'seraient', 'serais', 'serait', 'seras', 'serein', 'sereins', 'serie', 'serieuse', 'serieusement', 'serieuses', 'serieux', 'serions', 'serons', 'seront', 'serre', 'serres', 'serresauteuil', 'serresdauteuil', 'sert', 'servaient', 'servait', 'servi', 'service', 'services', 'servir', 'servira', 'servirait', 'serviront', 'ses', 'session', 'sessions', 'sest', 'seu', 'seul', 'seule', 'seulement', 'seuls', 'several', 'severe', 'sevran', 'sexisme', 'sexuelle', 'sexy', 'sf', 'shakeup', 'share', 'sheila', 'shirt', 'show', 'showeuro2016', 'shows', 'si', 'sia2018', 'siaap', 'sido18', 'siecle', 'siege', 'sieste', 'signal', 'signalisation', 'signataire', 'signature', 'signatures', 'signe', 'signees', 'signent', 'signez', 'silence', 'silicon', 'siliconvalley', 'simagrees', 'simone', 'simonet', 'simonnet', 'simple', 'simplem', 'simplement', 'simplet', 'simplifier', 'sincere', 'sincerite', 'singapour', 'single', 'sinistres', 'sinon', 'sinquiete', 'sinquietent', 'sinteresse', 'site', 'sites', 'situatio', 'situation', 'situvasario', 'six', 'skateboard', 'skb', 'ski', 'skieur', 'slogan', 'slovenie', 'smart', 'smartcity', 'smartgrids', 'smartmobility', 'smet', 'smic', 'sncf', 'sncfordonnances', 'snober', 'so', 'social', 'socialbusiness', 'sociale', 'socialement', 'sociales', 'socialiste', 'socialistes', 'socialmedia', 'socialos', 'socialparty', 'sociaux', 'societal', 'societaux', 'societe', 'societeapprenante', 'societes', 'socle', 'soeur', 'sofia', 'sofrench', 'soft', 'softpower', 'soi', 'soient', 'soir', 'soiree', 'soireeindecise', 'soit', 'solidaire', 'solidaires', 'solidarite', 'solide', 'solideo', 'solution', 'solutions', 'sommes', 'sommet', 'son', 'sondage', 'songeurs', 'sonne', 'sont', 'sop2018', 'sophie', 'sorbonne', 'sort', 'sorte', 'sortie', 'sorties', 'sortir', 'sortis', 'sortons', 'sos', 'sotchi', 'sou', 'souci', 'soucis', 'soudees', 'souf', 'souffle', 'souhait', 'souhaitais', 'souhaitant', 'souhaite', 'souhaites', 'souhaitons', 'soulagement', 'soule', 'souligne', 'soupirs', 'source', 'sourds', 'sourire', 'sourires', 'souris', 'sous', 'soustons', 'soutenabilite', 'soutenir', 
'soutenons', 'soutenu', 'souterrain', 'soutien', 'soutiennent', 'soutiens', 'soutient', 'souvenir', 'souvenirs', 'souvre', 'soyez', 'soyons', 'sp', 'special', 'speciale', 'specialise', 'specialiste', 'specialistes', 'specialite', 'specifique', 'spectacle', 'spectateu', 'spectateur', 'spectateurs', 'spectatuer', 'speculateurs', 'speculation', 'speculationsimmobilieres', 'splatoonec', 'splendide', 'spo', 'sponsor', 'sponsoring', 'sponsoriser', 'sponsors', 'spor', 'sporsora18', 'sprint', 'sprinters', 'squash', 'sqy', 'sra', 'ssd', 'ssd93', 'st', 'sta', 'stade', 'stade2', 'stadede', 'stadeolympique', 'stades', 'stadium', 'stage', 'stains', 'stand', 'standards', 'staps', 'stapsenperil', 'starcraft', 'start', 'starting', 'startings', 'startup', 'startupnation', 'startups', 'station', 'stationnement', 'stations', 'statue', 'statut', 'steph', 'stif', 'stop', 'story', 'stp', 'strade', 'strangersthings2', 'strasbourg', 'strategie', 'strategies', 'strategique', 'strike', 'structurant', 'structure', 'structurel', 'structurelle', 'structures', 'studio', 'stupidite', 'su', 'subsistent', 'succes', 'sucre', 'sud', 'suer', 'suffisant', 'suffit', 'suggestion', 'sui', 'suis', 'suisse', 'suite', 'suivez', 'suivi', 'suivra', 'suivre', 'suivront', 'sujet', 'sujets', 'super', 'superb', 'superbe', 'superflu', 'superflue', 'supermetro', 'superstitieux', 'supplement', 'supplementaire', 'supplementaires', 'supporter', 'supporters', 'supports', 'suppose', 'sur', 'surcout', 'surcouts', 'surcroit', 'sure', 'surement', 'surendettee', 'surf', 'surfer', 'surmonter', 'surplus', 'surpris', 'surprise', 'surs', 'sursis', 'surtou', 'surtout', 'surveillante', 'susc', 'suscite', 'suscitent', 'suspendue', 'svp', 'svplanete', 'swear', 'swim', 'swing', 'sylvievartan', 'symbole', 'symboles', 'symbolique', 'symboliques', 'sympa', 'sympathie', 'sympathique', 'syndicales', 'syndicalistes', 'syndicats', 'synthese', 'synthetisant', 'syrie', 'syst', 'systeme', 'systemique', 'szymanski', 't', 't2', 'ta', 'table', 'tableau', 'tabler', 'tables', 'tache', 'taekwendo', 'taekwondo', 'tahu', 'taille', 'tait', 'tal', 'talents', 'tank', 'tant', 'tantale', 'tanzanien', 'tapis', 'taquet', 'taraude', 'tard', 'tarif', 'tartes', 'tattoolete', 'taulier', 'taxe', 'taxi', 'taxis', 'tb', 'tbt', 'tc', 'tdf2016', 'tdtc', 'te', 'team', 'teamforce', 'teamimpatient', 'teamlifa', 'teampandanouslesjeux', 'teamunss', 'teaser', 'teasing', 'tech', 'techniques', 'technologies', 'technologique', 'technos', 'teddy', 'teddydecima', 'tee', 'teinture', 'tel', 'telco', 'tele', 'telecom', 'telephones', 'telespectateurs', 'television', 'telle', 'tellement', 'tels', 'temoign', 'temoignage', 'temple', 'temporaire', 'temporairement', 'temporaires', 'temps', 'tenable', 'tenaille', 'tenais', 'tenant', 'tendance', 'tendances', 'tenir', 'tennis', 'tennisactu', 'tennisdetable', 'tension', 'tensions', 'tentat', 'tentative', 'tente', 'tenter', 'tentes', 'tenu', 'tenue', 'tenus', 'terme', 'termes', 'terminal', 'terminale', 'termine', 'terminees', 'terminer', 'termines', 'terrain', 'terrains', 'terrasse', 'terre', 'terredejeux', 'terres', 'terreur', 'territo', 'territoire', 'territoires', 'territoiresdinfos', 'territorial', 'territoriale', 'terrorisme', 'terroriste', 'terroristes', 'tes', 'test', 'testee', 'testosterone', 'tete', 'tetes', 'texte', 'textileinnovations', 'tf1', 'thailande', 'thauvin', 'the', 'thematiques', 'theme', 'themes', 'therapie', 'thevenoud', 'thevoice', 'thevoicekids', 'thibault', 'thierno', 'thierry', 'think', 'thobois', 'thomas', 'thread', 'throwback', 'ti', 
'tian', 'ticket', 'tickets', 'tiendrons', 'tiens', 'tient', 'tingyang', 'tiny', 'tir', 'tirer', 'tireront', 'tissu', 'tit', 'titre', 'titree', 'titres', 'titulaire', 'tjs', 'tlt2017', 'tnt', 'to', 'today', 'togolais', 'toi', 'toiaussiannuleuntruc', 'toile', 'toilettes', 'tokyo', 'tokyo2020', 'tombe', 'tombent', 'tomo', 'ton', 'tonalite', 'tonique', 'tony', 'tonya', 'tonyestanguet', 'too', 'top', 'topcom18', 'torcy', 'torpiller', 'tot', 'total', 'totalement', 'tou', 'touche', 'touchera', 'touches', 'toujours', 'toujoursplus', 'toulousain', 'toulouse', 'tour', 'tourism', 'tourisme', 'tourismeaffaires', 'touristes', 'touristique', 'touristiques', 'tourner', 'tournoi', 'tours', 'tourtriangle', 'tous', 'tousathletesdelaville', 'tousprets', 'tout', 'toute', 'toutes', 'toxic', 'toyota', 'toyotamonde', 'tpmp', 'tpms', 'tps', 'tqo', 'tr', 'trace', 'traduction', 'trafique', 'train', 'training', 'trains', 'traitresse', 'trajet', 'tram', 'tran', 'tranquille', 'transcender', 'transformation', 'transforme', 'transformee', 'transformer', 'transilien', 'transitionenergetique', 'transmet', 'transparence', 'trappes', 'trav', 'travail', 'travaille', 'travaillent', 'travailler', 'travailleurs', 'travaux', 'travers', 'traversee', 'tremplin', 'tres', 'tresor', 'trevise', 'trialb', 'triathlon', 'tribunal', 'tribune', 'tribunes', 'trichent', 'tricherie', 'trick', 'tricolores', 'trio', 'triomphe', 'triompher', 'triple', 'tripler', 'triste', 'trocs', 'trois', 'troisieme', 'trolling', 'trompe', 'tromper', 'troncons', 'trop', 'trophee', 'troque', 'trottinette', 'trottoirs', 'troussel', 'trouve', 'trouvee', 'trouver', 'trouverez', 'trouves', 'trouvez', 'truc', 'truchot', 'trump', 'tt', 'tte', 'ttes', 'tu', 'tumultueuse', 'tunnels', 'turbo', 'turques', 'tv', 'tv5monde', 'tw974', 'tweet', 'tweets', 'twintowers', 'twirling', 'twitter', 'type', 'u', 'u17', 'u18', 'uber', 'ubi', 'udi', 'ue', 'uefa', 'uefaeuro2016', 'uf', 'ufs', 'uk', 'ultra', 'ultrahd', 'ultramoderne', 'ultras', 'ulysse', 'un', 'unanime', 'unanimiste', 'unanimite', 'une', 'unebellebandedegoistes', 'unes', 'uni', 'unie', 'uniformation', 'unimev', 'unique', 'uniquement', 'unir', 'unis', 'unissent', 'unite', 'united', 'unitenationale', 'univers', 'universel', 'universelle', 'universitaire', 'universite', 'uns', 'unss', 'up', 'update', 'ups', 'urbain', 'urbaine', 'urbaines', 'urbanism', 'urbanisme', 'urgence', 'urgent', 'us', 'usa', 'usager', 'usagers', 'usagersidf', 'usep75', 'usepiades', 'user', 'utile', 'utiles', 'utilisation', 'utiliser', 'utilisera', 'utilisez', 'utilisons', 'utilite', 'uvre', 'uvres', 'v', 'va', 'vacances', 'vache', 'vague', 'vaincre', 'vaine', 'vaire', 'vaires', 'vairessurmarne', 'vairestorcy', 'vais', 'val', 'valait', 'vale', 'valent', 'valerie', 'valeur', 'valeurs', 'valeyre', 'valide', 'valident', 'valider', 'valides', 'valley', 'valoriser', 'vanne', 'vante', 'vaporise', 'vaultier', 'vaut', 've', 'vecteur', 'vecu', 'vedette', 'vegas', 'vehiculer', 'vehicules', 'vehiculesautonomes', 'veille', 'veiller', 'velib', 'velibgate', 'velo', 'velodrome', 'venant', 'venants', 'vend', 'vendre', 'vendredi', 'vendu', 'vendue', 'vendus', 'venez', 'venir', 'vente', 'ventes', 'ventre', 'venu', 'venue', 'venus', 'ver', 'verbalise', 'verbaliser', 'verdict', 'verglas', 'verification', 'verifie', 'veritable', 'veritables', 'verre', 'verrez', 'verront', 'vers', 'versez', 'version', 'versions', 'vert', 'vertone', 'vesoul', 'vesperini', 'vessies', 'vetustes', 'veulent', 'veut', 'veux', 'vi', 'via', 'viable', 'vibre', 'vibrer', 'vice', 'vichy', 'vict', 
'victime', 'victimes', 'victoire', 'victoires', 'victor', 'vide', 'video', 'vides', 'vie', 'vieillir', 'vienne', 'viennent', 'viens', 'viensvoirmontaf', 'vient', 'vierge', 'vies', 'vieux', 'viewerdelombre', 'vif', 'vigila', 'vigilanc', 'vigilants', 'vigueur', 'village', 'villageolympique', 'villages', 'ville', 'villededemain', 'villepoubelle', 'villes', 'villetaneuse', 'vincennes', 'vingt', 'vins', 'viole', 'violence', 'virginia', 'virginie', 'vis', 'visage', 'vise', 'viseur', 'visibilite', 'visiblement', 'vision', 'visions', 'visit', 'visite', 'visitent', 'visiter', 'visitera', 'viste', 'visuel', 'visuels', 'vit', 'vitaux', 'vite', 'vitesse', 'vitrine', 'vivatech', 'vive', 'vivem', 'vivement', 'vivez', 'vivre', 'vivreensemble', 'vla', 'vo', 'voe', 'voeu', 'voici', 'voie', 'voient', 'voies', 'voila', 'voile', 'voiles', 'voilier', 'voir', 'vois', 'voisins', 'voit', 'voiturage', 'voiture', 'voitures', 'voix', 'vol', 'volera', 'volet', 'volley', 'volontaire', 'volonte', 'vols', 'vont', 'vos', 'vote', 'voter', 'votera', 'votez', 'votons', 'votre', 'voudraient', 'voudrais', 'voulait', 'voulant', 'voulez', 'vouloir', 'voulons', 'voulus', 'vous', 'voyage', 'voyages', 'voyageur', 'voyants', 'voyez', 'vp', 'vr', 'vrai', 'vraie', 'vraies', 'vraiment', 'vrais', 'vrp', 'vs', 'vtep', 'vtt', 'vu', 'vue', 'w', 'waiona', 'walt', 'waow', 'wapler', 'water', 'wc2019', 'we', 'weare24', 'wearethechampions', 'web', 'webdoc', 'wednesday', 'weed', 'week', 'weekend', 'wellness', 'whaoou', 'whaou', 'what', 'whatelse', 'why', 'wifi', 'will', 'willem', 'winner', 'with', 'word', 'workout', 'workshop', 'world', 'worldcup', 'worldcup2023', 'worldwide', 'wow', 'wrc2023', 'wtf', 'wtfdj', 'wwf', 'x', 'x2', 'xfemmes', 'xpot', 'xpotfens', 'xvde', 'xviiieme', 'xxl', 'y', 'yannick', 'yaura', 'yeesss', 'yesterday', 'yeux', 'ymca', 'yo', 'you', 'youpi', 'your', 'yunus', 'yvelines', 'yves', 'z', 'zac', 'zep', 'zermi', 'zero', 'zevent', 'zhao', 'zimbalist', 'zo', 'zone', 'zoologique', 'zut']\n[('pour', 2548), ('de', 2287), ('la', 1602), ('les', 1544), ('a', 1446), ('le', 1273), ('et', 1003), ('des', 898), ('en', 790), ('l', 743), ('du', 633), ('est', 554), ('un', 533), ('d', 516), ('une', 508), ('sur', 383), ('on', 359), ('jo', 342), ('que', 341), ('pas', 337), ('qui', 335), ('c', 317), ('s', 311), ('avec', 304), ('jeux', 283), ('au', 274), ('dans', 271), ('ce', 268), ('aux', 266), ('il', 243), ('hidalgo', 234), ('bien', 206), ('nous', 181), ('2024', 179), ('ne', 179), ('ca', 174), ('va', 173), ('se', 170), ('grand', 162), ('je', 161), ('par', 149), ('mais', 143), ('vous', 142), ('plus', 136), ('via', 129), ('y', 128), ('merci', 126), ('olympiques', 125), ('ont', 120), ('faire', 119), ('pari', 119), ('tout', 118), ('etre', 117), ('n', 112), ('gagne', 109), ('qu', 108), ('tous', 103), ('olympique', 102), ('sont', 102), ('monde', 101), ('ceux', 100), ('euro2016', 96), ('meme', 94), ('1', 93), ('son', 92), ('2023', 91), ('projet', 87), ('j', 84), ('cette', 83), ('tu', 82), ('fait', 81), ('anne', 78), ('apres', 78), ('sera', 77), ('amp', 76), ('deja', 76), ('nos', 76), ('notre', 74), ('concert', 74), ('cest', 73), ('ou', 71), ('peut', 70), ('sa', 70), ('francais', 70), ('ville', 70), ('2', 69), ('bonne', 69), ('budget', 68), ('feter', 67), ('president', 66), ('hollande', 66), ('macron', 65), ('tres', 65), ('ans', 65), ('nouvelle', 65), ('comme', 64), ('si', 64), ('bravo', 64), ('ces', 63), ('fr', 62), ('contre', 62), ('anouslesjeux', 62), ('avant', 61), ('avoir', 61), ('etait', 57), ('encore', 57), ('rio', 57), ('pendant', 56), 
('quand', 55), ('soir', 55), ('faveur', 53), ('content', 52), ('milliards', 52), ('non', 51), ('aussi', 50), ('belle', 50), ('comment', 50), ('leur', 50), ('maire', 49), ('athletes', 49), ('comite', 48), ('organisation', 47), ('gt', 46), ('ifs', 46), ('3', 45), ('pays', 45), ('ses', 44), ('rugby', 44), ('toi', 43), ('p', 42), ('ete', 42), ('ils', 42), ('cout', 42), ('coupe', 42), ('nouveau', 41), ('coup', 41), ('bon', 41), ('faut', 41), ('denis', 41), ('droit', 41), ('te', 41), ('loi', 41), ('millions', 40), ('certains', 40), ('heritage', 39), ('alors', 39), ('risques', 39), ('rapport', 39), ('seront', 38), ('sans', 38), ('depuis', 38), ('detre', 38), ('trop', 38), ('vers', 37), ('6', 37), ('elle', 37), ('saint', 37), ('place', 37), ('attendant', 37), ('e', 36), ('fete', 36), ('voir', 35), ('m', 35), ('soutien', 35), ('seine', 35), ('marseille', 35), ('savent', 35), ('vacances', 34), ('estanguet', 34), ('entre', 34), ('demain', 33), ('danser', 33), ('veut', 33), ('suis', 33), ('10', 32), ('aura', 32), ('paralympiques', 32), ('mon', 32), ('parti', 32), ('tran', 32), ('reconnais', 32), ('elysee', 32), ('500', 32), ('surcouts', 32), ('pr', 31), ('mieux', 31), ('innovation', 31), ('fois', 31), ('equipe', 31), ('montrer', 31), ('bosse', 31), ('euros', 31), ('matin', 30), ('dit', 30), ('dossier', 30), ('choree', 30), ('mal', 30), ('t', 30), ('if', 30), ('travaux', 29), ('rt', 29), ('accueillir', 29), ('mode', 29), ('jeunes', 29), ('sous', 29), ('express', 29), ('moins', 29), ('7', 29), ('joie', 29), ('juste', 29), ('donc', 29), ('18', 29), ('toute', 29), ('aujourd', 28), ('hui', 28), ('rien', 28), ('5', 28), ('victoire', 28), ('vont', 28), ('question', 28), ('me', 28), ('votre', 28), ('organiser', 27), ('payer', 27), ('temps', 27), ('pourquoi', 27), ('retour', 27), ('grands', 27), ('ensemble', 27), ('doit', 27), ('hotel', 27), ('opportunites', 27), ('evenements', 27), ('raison', 26), ('jour', 26), ('appel', 26), ('securite', 26), ('medailles', 26), ('opportunite', 26), ('argent', 26), ('vu', 26), ('projets', 26), ('geste', 26), ('quoi', 26), ('budgetaire', 26), ('route', 25), ('chance', 25), ('cc', 25), ('danse', 25), ('dab', 25), ('maitrises', 25), ('ouverture', 25), ('soit', 25), ('quelle', 25), ('conference', 25), ('evenement', 24), ('20', 24), ('ien', 24), ('toujours', 24), ('construction', 24), ('2018', 24), ('smart', 24), ('construire', 24), ('tony', 24), ('defendre', 24), ('prete', 23), ('15', 23), ('besoin', 23), ('annonce', 23), ('rerb', 23), ('peu', 23), ('plan', 23), ('stade', 23), ('startups', 23), ('completement', 23), ('celebration', 23), ('classe', 23), ('donne', 23), ('direct', 23), ('concertation', 23), ('porte', 23), ('soutenir', 23), ('ici', 22), ('durable', 22), ('elus', 22), ('moi', 22), ('etat', 22), ('passe', 22), ('mobilisation', 22), ('dire', 22), ('emploi', 22), ('tour', 22), ('parler', 22), ('celebrer', 22), ('campagne', 22), ('derapage', 22), ('felicitations', 22), ('jours', 22), ('avons', 22), ('enjeux', 22), ('alerte', 22), ('beau', 21), ('quel', 21), ('devenir', 21), ('premiere', 21), ('toutes', 21), ('francaise', 21), ('epreuves', 21), ('or', 21), ('larrivee', 21), ('maintenant', 21), ('sarkozy', 21), ('expo', 20), ('propose', 20), ('economie', 20), ('tourisme', 20), ('preparer', 20), ('profiter', 20), ('journee', 20), ('travail', 20), ('hysterique', 20), ('prepare', 20), ('cela', 20), ('quelques', 20), ('acteurs', 20), ('villes', 20), ('rolandgarros', 20), ('an', 20), ('prix', 19), ('smartcity', 19), ('o', 19), ('choix', 19), ('prets', 19), ('jusqu', 19), 
('officiel', 19), ('vue', 19), ('hate', 19), ('ai', 19), ('financier', 19), ('francois', 19), ('delegation', 19), ('ceremonie', 19), ('f', 19), ('pret', 19), ('reunion', 19), ('ienne', 18), ('2025', 18), ('metro', 18), ('soiree', 18), ('debat', 18), ('village', 18), ('public', 18), ('ligne', 18), ('accueil', 18), ('sncf', 18), ('000', 18), ('cet', 18), ('territoire', 18), ('presente', 17), ('mois', 17), ('dune', 17), ('top', 17), ('idee', 17), ('oui', 17), ('ds', 17), ('touristes', 17), ('debut', 17), ('mobilite', 17), ('ma', 17), ('site', 17), ('annees', 17), ('petit', 17), ('jamais', 17), ('champions', 17), ('commence', 17), ('moment', 17), ('parle', 17), ('mairie', 17), ('super', 17), ('organise', 17), ('societe', 17), ('image', 17), ('gouvernement', 17), ('importants', 17), ('pyeongchang2018', 17), ('plutot', 16), ('lors', 16), ('passion', 16), ('conseil', 16), ('equipes', 16), ('tant', 16), ('4', 16), ('defi', 16), ('lire', 16), ('valeurs', 16), ('olympisme', 16), ('semaine', 16), ('chez', 16), ('experts', 16), ('compte', 16), ('off', 16), ('piscine', 16), ('course', 16), ('personne', 16), ('st', 16), ('aime', 16), ('infrastructures', 16), ('jo2016', 16), ('mars', 15), ('programme', 15), ('avait', 15), ('co', 15), ('onesta', 15), ('medaille', 15), ('leurs', 15), ('sites', 15), ('reussite', 15), ('savoir', 15), ('2016', 15), ('vraiment', 15), ('com', 15), ('region', 15), ('heureux', 15), ('espere', 15), ('prevu', 15), ('premier', 15), ('8', 15), ('greves', 15), ('visite', 15), ('rg16', 15), ('etude', 15), ('agression', 15), ('lequipe', 14), ('video', 14), ('perspective', 14), ('centre', 14), ('ministre', 14), ('culture', 14), ('autres', 14), ('selon', 14), ('93', 14), ('chantier', 14), ('ssd93', 14), ('financer', 14), ('devant', 14), ('probleme', 14), ('succes', 14), ('rendez', 14), ('sante', 14), ('fin', 14), ('futur', 14), ('tokyo2020', 14), ('saisir', 14), ('journeeolympique', 14), ('surcout', 14), ('24', 13), ('europe1', 13), ('ah', 13), ('venir', 13), ('claude', 13), ('po', 13), ('autour', 13), ('pou', 13), ('startup', 13), ('pourrait', 13), ('partie', 13), ('pub', 13), ('gros', 13), ('donner', 13), ('eiffel', 13), ('grande', 13), ('ambition', 13), ('retombees', 13), ('economique', 13), ('vos', 13), ('gagner', 13), ('rdv', 13), ('performance', 13), ('beaucoup', 13), ('euro', 13), ('ile', 13), ('participer', 13), ('idf', 13), ('autre', 13), ('idees', 13), ('lignes', 13), ('hier', 13), ('champion', 13), ('reve', 13), ('jeunesse', 13), ('objectif', 13), ('personnes', 13), ('presse', 13), ('cur', 13), ('2020', 13), ('risque', 13), ('promouvoir', 13), ('expo2025', 13), ('eme', 13), ('cojo', 13), ('greve', 12), ('fouet', 12), ('voeu', 12), ('info', 12), ('accueille', 12), ('universelle', 12), ('vie', 12), ('occasion', 12), ('figaro', 12), ('grace', 12), ('formidable', 12), ('laura', 12), ('voila', 12), ('bilan', 12), ('tellement', 12), ('sommes', 12), ('cas', 12), ('cgt', 12), ('depart', 12), ('sponsors', 12), ('soutient', 12), ('quels', 12), ('16', 12), ('aurait', 12), ('travailler', 12), ('impact', 12), ('mettre', 12), ('aider', 12), ('serait', 12), ('crois', 12), ('financement', 12), ('c8', 12), ('populaire', 12), ('superbe', 12), ('avez', 12), ('autant', 12), ('martin', 12), ('plaisir', 12), ('lorganisation', 12), ('nombreuses', 12), ('education', 12), ('lancement', 12), ('developpement', 12), ('politique', 11), ('lance', 11), ('lundi', 11), ('retrouvez', 11), ('formations', 11), ('champ', 11), ('eu', 11), ('mise', 11), ('flessel', 11), ('13', 11), ('parce', 11), ('enjeu', 11), 
('pense', 11), ('present', 11), ('revoir', 11), ('aucun', 11), ('allez', 11), ('concernant', 11), ('olympics', 11), ('salaire', 11), ('nouvel', 11), ('faudra', 11), ('gouv', 11), ('prendre', 11), ('afin', 11), ('car', 11), ('notamment', 11), ('presentation', 11), ('flamme', 11), ('role', 11), ('accelerateur', 11), ('publique', 11), ('invite', 11), ('1er', 11), ('economiques', 11), ('meilleur', 11), ('pratique', 11), ('article', 11), ('ive', 11), ('vite', 11), ('social', 11), ('moyens', 11), ('surf', 11), ('canousconcerne', 11), ('socialparty', 11), ('seancecd93', 11), ('nouvelles', 11), ('couts', 11), ('depassement', 11), ('renoncer', 11), ('enjeuxemplois', 11), ('met', 10), ('respect', 10), ('souhaite', 10), ('news', 10), ('afp', 10), ('tv', 10), ('etes', 10), ('actu', 10), ('demande', 10), ('engagement', 10), ('groupe', 10), ('vient', 10), ('ratp', 10), ('wapler', 10), ('gens', 10), ('14', 10), ('cadre', 10), ('attentat', 10), ('signe', 10), ('peuple', 10), ('force', 10), ('atout', 10), ('lutte', 10), ('entreprises', 10), ('histoire', 10), ('ni', 10), ('fassent', 10), ('falloir', 10), ('vote', 10), ('charge', 10), ('100', 10), ('societal', 10), ('publics', 10), ('enfin', 10), ('bonjour', 10), ('marche', 10), ('voit', 10), ('combien', 10), ('minutes', 10), ('1ere', 10), ('generation', 10), ('couleurs', 10), ('faites', 10), ('castaldi', 10), ('reunis', 10), ('magnifique', 10), ('sinon', 10), ('es', 10), ('marie', 10), ('pouvoir', 10), ('mme', 10), ('famille', 10), ('part', 10), ('belles', 10), ('decouvrir', 10), ('depense', 10), ('habitants', 10), ('rappel', 10), ('saintdenis', 10), ('plein', 10), ('devrait', 10), ('cause', 10), ('victimes', 10), ('group', 10), ('chers', 10), ('roadtorio', 10), ('partenaire', 10), ('bye', 10), ('environnement', 10), ('gestion', 10), ('sujet', 10), ('vaires', 10), ('territoires', 10), ('plainecommune', 10), ('calendrier', 10), ('tours', 10), ('victime', 10), ('mardi', 10), ('rwc2023', 10), ('bientot', 9), ('officielle', 9), ('go', 9), ('fond', 9), ('officialise', 9), ('cher', 9), ('pose', 9), ('hambourg', 9), ('surtout', 9), ('articuler', 9), ('17', 9), ('9', 9), ('jop2024', 9), ('service', 9), ('trouve', 9), ('logement', 9), ('deuros', 9), ('chaque', 9), ('commun', 9), ('prevenir', 9), ('mouvement', 9), ('paralympique', 9), ('the', 9), ('point', 9), ('deux', 9), ('reponse', 9), ('competition', 9), ('2017', 9), ('font', 9), ('ton', 9), ('enfants', 9), ('lui', 9), ('bleus', 9), ('attribution', 9), ('tt', 9), ('contribuables', 9), ('ecole', 9), ('aviron', 9), ('celui', 9), ('peine', 9), ('innovants', 9), ('pu', 9), ('e1', 9), ('republique', 9), ('facture', 9), ('depasse', 9), ('dont', 9), ('assmann', 9), ('eviter', 9), ('impots', 9), ('reste', 9), ('position', 9), ('business', 9), ('promotion', 9), ('nautique', 9), ('salle', 9), ('sociaux', 9), ('livraison', 9), ('obtenir', 9), ('situation', 9), ('jop', 9), ('atelier', 9), ('equipements', 9), ('commune', 9), ('delais', 9), ('metropole', 9), ('pyeongchang', 9), ('jo2018', 9), ('capitale', 8), ('2015', 8), ('belgique', 8), ('envie', 8), ('europe', 8), ('re', 8), ('by', 8), ('vois', 8), ('depenses', 8), ('fric', 8), ('lancer', 8), ('handicap', 8), ('hein', 8), ('honneur', 8), ('futurs', 8), ('formation', 8), ('analyse', 8), ('accelerer', 8), ('pierre', 8), ('responsables', 8), ('chose', 8), ('franciliens', 8), ('intervention', 8), ('artistes', 8), ('investissement', 8), ('candidate', 8), ('tweets', 8), ('heure', 8), ('vivre', 8), ('aller', 8), ('hote', 8), ('forme', 8), ('train', 8), ('psg', 8), ('parcours', 8), 
('demander', 8), ('chanter', 8), ('haut', 8), ('niveau', 8), ('21', 8), ('prochains', 8), ('main', 8), ('photo', 8), ('sens', 8), ('vive', 8), ('vivement', 8), ('soient', 8), ('athlete', 8), ('retard', 8), ('plusieurs', 8), ('medias', 8), ('cote', 8), ('important', 8), ('jouer', 8), ('membres', 8), ('accueillera', 8), ('as', 8), ('surprise', 8), ('quotidien', 8), ('rendre', 8), ('porter', 8), ('rouge', 8), ('live', 8), ('marketing', 8), ('corruption', 8), ('200', 8), ('serieux', 8), ('mis', 8), ('enrichir', 8), ('remercie', 8), ('agi', 8), ('60', 8), ('nouveaux', 8), ('30', 8), ('passer', 8), ('evoquer', 8), ('club', 8), ('experience', 8), ('bernard', 8), ('points', 8), ('logements', 8), ('theme', 8), ('base', 8), ('partenaires', 8), ('doper', 8), ('rencontre', 8), ('aquatique', 8), ('aubervilliers', 8), ('sud', 8), ('derapages', 8), ('pointe', 8), ('revue', 8), ('partenariat', 8), ('championne', 8), ('meilleure', 8), ('marches', 8), ('sinquietent', 8), ('reels', 8), ('270', 8), ('fevrier', 8), ('trophee', 8), ('fourcade', 8), ('rmc', 7), ('assure', 7), ('iens', 7), ('pognon', 7), ('pro', 7), ('vrai', 7), ('ps', 7), ('candidat', 7), ('2028', 7), ('jean', 7), ('12', 7), ('exemple', 7), ('premiers', 7), ('petanque', 7), ('assurer', 7), ('septembre', 7), ('proposer', 7), ('londres', 7), ('membre', 7), ('viens', 7), ('bois', 7), ('annee', 7), ('conseilde', 7), ('mesures', 7), ('entier', 7), ('date', 7), ('pain', 7), ('modele', 7), ('unss', 7), ('ajoutons', 7), ('pb', 7), ('attends', 7), ('city', 7), ('gouffre', 7), ('pa', 7), ('stade2', 7), ('match', 7), ('franchement', 7), ('tete', 7), ('con', 7), ('samedi', 7), ('arrive', 7), ('80', 7), ('ancien', 7), ('solidaire', 7), ('commencer', 7), ('fier', 7), ('creer', 7), ('grandes', 7), ('transformer', 7), ('g', 7), ('seuls', 7), ('apl', 7), ('mobilise', 7), ('promet', 7), ('seance', 7), ('images', 7), ('liam', 7), ('nom', 7), ('reception', 7), ('sait', 7), ('feminine', 7), ('mes', 7), ('attend', 7), ('elu', 7), ('dites', 7), ('lieu', 7), ('oh', 7), ('mondiale', 7), ('celle', 7), ('matiere', 7), ('sauf', 7), ('avenir', 7), ('compris', 7), ('defend', 7), ('sociale', 7), ('commission', 7), ('pourtant', 7), ('v', 7), ('attendre', 7), ('vendre', 7), ('vrp', 7), ('espoir', 7), ('serai', 7), ('note', 7), ('organises', 7), ('ives', 7), ('futures', 7), ('mauvaise', 7), ('fiere', 7), ('mobiliser', 7), ('prochain', 7), ('choisi', 7), ('cotes', 7), ('torcy', 7), ('canoe', 7), ('collectivites', 7), ('direction', 7), ('puis', 7), ('presenter', 7), ('strategique', 7), ('hacking', 7), ('no', 7), ('bout', 7), ('directement', 7), ('importer', 7), ('transformation', 7), ('seinesaintdenis', 7), ('six', 7), ('topcom18', 7), ('2019', 7), ('rugbyworldcup2023', 7), ('23', 6), ('03', 6), ('prefererait', 6), ('actualites', 6), ('air', 6), ('avis', 6), ('confirmee', 6), ('dopage', 6), ('terme', 6), ('eux', 6), ('emplois', 6), ('discipline', 6), ('etions', 6), ('photos', 6), ('pauvres', 6), ('allons', 6), ('quelles', 6), ('oublier', 6), ('dispositif', 6), ('vitrine', 6), ('immobilier', 6), ('professionnels', 6), ('anti', 6), ('1924', 6), ('voici', 6), ('attractivite', 6), ('excellence', 6), ('exceptionnel', 6), ('nationale', 6), ('esprit', 6), ('pourraient', 6), ('logo', 6), ('mission', 6), ('up', 6), ('challenge', 6), ('derriere', 6), ('questions', 6), ('in', 6), ('venez', 6), ('piste', 6), ('solidaires', 6), ('boulot', 6), ('tele', 6), ('benefice', 6), ('promoteurs', 6), ('lima', 6), ('derniere', 6), ('langue', 6), ('pecresse', 6), ('pre', 6), ('parmi', 6), ('rmclive', 6), 
('choses', 6), ('chantiers', 6), ('excellent', 6), ('ambiance', 6), ('serais', 6), ('ms', 6), ('eco', 6), ('malgre', 6), ('entrainement', 6), ('adjoint', 6), ('28', 6), ('partout', 6), ('bons', 6), ('guerre', 6), ('continue', 6), ('jeu', 6), ('dis', 6), ('rer', 6), ('fierte', 6), ('participe', 6), ('bel', 6), ('migrants', 6), ('prend', 6), ('trois', 6), ('mn', 6), ('permettre', 6), ('pleine', 6), ('vs', 6), ('interet', 6), ('diffusion', 6), ('echanges', 6), ('su', 6), ('suite', 6), ('tiens', 6), ('seule', 6), ('designation', 6), ('couter', 6), ('voile', 6), ('hors', 6), ('dernier', 6), ('stades', 6), ('recrutement', 6), ('lycee', 6), ('confiance', 6), ('mercredi', 6), ('emmanuel', 6), ('heures', 6), ('reuni', 6), ('championnat', 6), ('cnosf', 6), ('logique', 6), ('argument', 6), ('riner', 6), ('espritbleu', 6), ('construisons', 6), ('fini', 6), ('yeux', 6), ('decrocher', 6), ('share', 6), ('lobbying', 6), ('semble', 6), ('recrute', 6), ('realisation', 6), ('fort', 6), ('ouvert', 6), ('marne', 6), ('edition', 6), ('mobilises', 6), ('durables', 6), ('suivez', 6), ('sorbonne', 6), ('responsable', 6), ('renovation', 6), ('hooligans', 6), ('exemplaires', 6), ('mixite', 6), ('maison', 6), ('devoir', 6), ('beckers', 6), ('spor', 6), ('directeur', 6), ('fantastique', 6), ('yaura', 6), ('bordeaux', 6), ('mipim2018', 6), ('mascotte', 6), ('dorganisation', 6), ('prison', 6), ('secteur', 6), ('preparation', 6), ('sporsora18', 6), ('rugby2023', 6), ('challenges', 5), ('medtaha', 5), ('mit', 5), ('politiques', 5), ('run', 5), ('echos', 5), ('avril', 5), ('reticente', 5), ('madame', 5), ('depenser', 5), ('foutre', 5), ('paye', 5), ('confirme', 5), ('volonte', 5), ('vouloir', 5), ('gauche', 5), ('long', 5), ('obtention', 5), ('obtenu', 5), ('btp', 5), ('honte', 5), ('panne', 5), ('simone', 5), ('replay', 5), ('deficit', 5), ('celebre', 5), ('devient', 5), ('tribune', 5), ('preparent', 5), ('station', 5), ('35', 5), ('complet', 5), ('50', 5), ('master', 5), ('start', 5), ('prennent', 5), ('hulot', 5), ('ministere', 5), ('change', 5), ('pollution', 5), ('rentree', 5), ('vraie', 5), ('benevoles', 5), ('violence', 5), ('qualite', 5), ('inclusifs', 5), ('seul', 5), ('parc', 5), ('terrain', 5), ('kayak', 5), ('arnaque', 5), ('ailleurs', 5), ('sacre', 5), ('sdf', 5), ('09', 5), ('foot', 5), ('recoit', 5), ('tenir', 5), ('rats', 5), ('team', 5), ('gouvernance', 5), ('pcf', 5), ('felicite', 5), ('ecologique', 5), ('population', 5), ('tournoi', 5), ('doivent', 5), ('encourager', 5), ('epreuve', 5), ('chances', 5), ('triathlon', 5), ('surement', 5), ('future', 5), ('ambassadeur', 5), ('general', 5), ('couple', 5), ('investir', 5), ('laisse', 5), ('bah', 5), ('interview', 5), ('veux', 5), ('pourra', 5), ('show', 5), ('chercher', 5), ('semaines', 5), ('om', 5), ('quartier', 5), ('coucou', 5), ('rappelle', 5), ('midi', 5), ('unie', 5), ('climat', 5), ('capable', 5), ('programmation', 5), ('scene', 5), ('titre', 5), ('bonheur', 5), ('enquete', 5), ('desastre', 5), ('attention', 5), ('rue', 5), ('ultra', 5), ('tel', 5), ('excellente', 5), ('canal', 5), ('reconnaissance', 5), ('los', 5), ('francaises', 5), ('gagnant', 5), ('regarde', 5), ('diffuse', 5), ('amis', 5), ('participez', 5), ('feminin', 5), ('mondialise', 5), ('partager', 5), ('nat', 5), ('candidats', 5), ('profit', 5), ('palais', 5), ('laisser', 5), ('tapis', 5), ('manque', 5), ('reaction', 5), ('femmes', 5), ('naves', 5), ('possible', 5), ('venue', 5), ('partage', 5), ('nantes', 5), ('nicolas', 5), ('salon', 5), ('avance', 5), ('zone', 5), ('neuf', 5), 
('charles', 5), ('fera', 5), ('braderiedelille', 5), ('quatre', 5), ('changer', 5), ('teddy', 5), ('braillard', 5), ('services', 5), ('total', 5), ('levier', 5), ('supplementaires', 5), ('decision', 5), ('athletisme', 5), ('pire', 5), ('travers', 5), ('comprendre', 5), ('sortie', 5), ('petits', 5), ('roissy', 5), ('national', 5), ('merite', 5), ('nuit', 5), ('rome', 5), ('inauguration', 5), ('drapeau', 5), ('paix', 5), ('amenagement', 5), ('financiers', 5), ('bassin', 5), ('quil', 5), ('efforts', 5), ('double', 5), ('exposition', 5), ('nombreux', 5), ('prochaine', 5), ('candidater', 5), ('contribuer', 5), ('minimum', 5), ('depute', 5), ('vairestorcy', 5), ('dun', 5), ('departemental', 5), ('pandathlon', 5), ('26', 5), ('600', 5), ('marville', 5), ('utile', 5), ('masseglia', 5), ('unique', 5), ('reseau', 5), ('proposez', 5), ('innovations', 5), ('mr', 5), ('innovantes', 5), ('extension', 5), ('football', 5), ('etc', 5), ('terroristes', 5), ('75', 5), ('ecrit', 5), ('eleves', 5), ('crossisf', 5), ('5g', 5), ('utiles', 5), ('fa', 5), ('estime', 5), ('solideo', 5), ('olivier', 5), ('epinaysurseine', 5), ('lexperience', 5), ('poste', 5), ('ouvre', 5), ('monsieur', 5), ('borne', 5), ('90', 5), ('ag', 5), ('loccasion', 5), ('necessaires', 5), ('cluzel', 5), ('honnete', 5), ('hermitage', 5), ('budgetaires', 5), ('ny', 5), ('reel', 5), ('lacanau', 5), ('infographie', 5), ('3eme', 5), ('brut', 5), ('bourget', 5), ('impose', 5), ('derogations', 5), ('sp', 5), ('lamottebeuvroncitedescavaliers', 5), ('sengage', 4), ('milliard', 4), ('maires', 4), ('conditions', 4), ('devra', 4), ('engage', 4), ('jdd', 4), ('ouest', 4), ('adresse', 4), ('finalement', 4), ('vitesse', 4), ('perdu', 4), ('trump', 4), ('attendu', 4), ('fonds', 4), ('apprendre', 4), ('ministres', 4), ('ns', 4), ('champs', 4), ('urbain', 4), ('coeur', 4), ('fallait', 4), ('roule', 4), ('energies', 4), ('mots', 4), ('lattribution', 4), ('entrepreneurs', 4), ('tweet', 4), ('profits', 4), ('ok', 4), ('sept', 4), ('certain', 4), ('cree', 4), ('voiture', 4), ('salue', 4), ('consultation', 4), ('expliquer', 4), ('joue', 4), ('reussi', 4), ('initial', 4), ('prives', 4), ('fou', 4), ('journalistes', 4), ('amour', 4), ('attentats', 4), ('terrorisme', 4), ('q', 4), ('bordel', 4), ('entraine', 4), ('catastrophe', 4), ('claque', 4), ('emotions', 4), ('orga', 4), ('handball', 4), ('saut', 4), ('fut', 4), ('reformer', 4), ('chante', 4), ('voulons', 4), ('espace', 4), ('contente', 4), ('assister', 4), ('peuvent', 4), ('quon', 4), ('assez', 4), ('rejouit', 4), ('k', 4), ('davoir', 4), ('bus', 4), ('mi', 4), ('b', 4), ('valerie', 4), ('maitrise', 4), ('merveilleux', 4), ('presque', 4), ('19', 4), ('internationale', 4), ('etaient', 4), ('trouver', 4), ('dur', 4), ('22', 4), ('jupiter', 4), ('dete', 4), ('jeveux2025', 4), ('we', 4), ('gout', 4), ('petite', 4), ('and', 4), ('to', 4), ('etais', 4), ('meilleurs', 4), ('troisieme', 4), ('convie', 4), ('presidents', 4), ('not', 4), ('connerie', 4), ('tpmp', 4), ('week', 4), ('end', 4), ('participation', 4), ('vieux', 4), ('i', 4), ('lebourget', 4), ('solidarite', 4), ('unis', 4), ('perec', 4), ('numero', 4), ('comprend', 4), ('parvis', 4), ('croissance', 4), ('revenu', 4), ('suivre', 4), ('mondial', 4), ('urgence', 4), ('angeles', 4), ('grave', 4), ('effet', 4), ('resume', 4), ('vo', 4), ('faineants', 4), ('anime', 4), ('gars', 4), ('gueule', 4), ('prithika', 4), ('21h', 4), ('travaille', 4), ('choc', 4), ('vision', 4), ('cinema', 4), ('dela', 4), ('retweeted', 4), ('yvelines', 4), ('frenchtech', 4), ('bfmtv', 4), 
('cadeau', 4), ('menaces', 4), ('larem', 4), ('egalement', 4), ('benefices', 4), ('vais', 4), ('invites', 4), ('rever', 4), ('avion', 4), ('pdt', 4), ('longtemps', 4), ('comprends', 4), ('datagueule', 4), ('difficile', 4), ('accord', 4), ('amenagements', 4), ('prevues', 4), ('loisirs', 4), ('beaux', 4), ('qd', 4), ('vecteur', 4), ('vise', 4), ('arriver', 4), ('plaide', 4), ('hurtis', 4), ('gere', 4), ('11', 4), ('financiere', 4), ('soutenons', 4), ('departement', 4), ('citoyens', 4), ('pensee', 4), ('thierry', 4), ('presents', 4), ('luxe', 4), ('ciel', 4), ('art', 4), ('ferme', 4), ('finale', 4), ('tokyo', 4), ('lol', 4), ('loitravail', 4), ('prestige', 4), ('clip', 4), ('lice', 4), ('strategie', 4), ('fra', 4), ('pres', 4), ('existe', 4), ('perdre', 4), ('twitter', 4), ('faudrait', 4), ('parole', 4), ('odd', 4), ('puisse', 4), ('14juillet', 4), ('creation', 4), ('ateliers', 4), ('euh', 4), ('cdm', 4), ('ami', 4), ('rtlmatin', 4), ('priorite', 4), ('joli', 4), ('thomas', 4), ('soyons', 4), ('77', 4), ('plaine', 4), ('stage', 4), ('crue', 4), ('presidente', 4), ('avions', 4), ('dijon', 4), ('penser', 4), ('propositions', 4), ('solutions', 4), ('lensemble', 4), ('accelere', 4), ('sent', 4), ('accompagner', 4), ('actions', 4), ('avancer', 4), ('prevus', 4), ('jai', 4), ('heureusement', 4), ('mos', 4), ('innovant', 4), ('esperer', 4), ('adieu', 4), ('ilede', 4), ('veille', 4), ('etudes', 4), ('carte', 4), ('exemplarite', 4), ('fft', 4), ('doute', 4), ('designe', 4), ('poubelles', 4), ('jury', 4), ('aucune', 4), ('pris', 4), ('conf', 4), ('inclusion', 4), ('bureau', 4), ('deviennent', 4), ('cdg', 4), ('lemploi', 4), ('urbanisme', 4), ('declare', 4), ('ligf', 4), ('mairiede', 4), ('potentiels', 4), ('stadede', 4), ('organisateurs', 4), ('lambition', 4), ('actualite', 4), ('henard', 4), ('bene', 4), ('table', 4), ('so', 4), ('remis', 4), ('workshop', 4), ('decembre', 4), ('juge', 4), ('directeurs', 4), ('relations', 4), ('intervenants', 4), ('27', 4), ('beauc', 4), ('islamistes', 4), ('entreprise', 4), ('10milliards', 4), ('encourageantes', 4), ('limiter', 4), ('differents', 4), ('promis', 4), ('enveloppe', 4), ('prevue', 4), ('deurope', 4), ('possibles', 4), ('etudiants', 4), ('commande', 4), ('peux', 4), ('brexit', 4), ('r', 4), ('selection', 4), ('2024secondes', 4), ('leblog', 4), ('tnt', 4), ('sexuelle', 4), ('commencent', 4), ('exception', 4), ('privatisation', 4), ('majeur', 4), ('ligne18', 4), ('2030', 4), ('lifting', 4), ('laudiovisuel', 4), ('tlt2017', 4), ('boston', 3), ('grece', 3), ('rtl', 3), ('nice', 3), ('alaune', 3), ('flop', 3), ('lesechos', 3), ('cetait', 3), ('more', 3), ('lund', 3), ('lille', 3), ('nord', 3), ('liberation', 3), ('quasi', 3), ('cesse', 3), ('ecologie', 3), ('moitie', 3), ('echange', 3), ('securiser', 3), ('500millions', 3), ('entendre', 3), ('suspendue', 3), ('sais', 3), ('nombre', 3), ('instant', 3), ('coulisses', 3), ('cavous', 3), ('assures', 3), ('format', 3), ('ramener', 3), ('doigts', 3), ('energie', 3), ('vendredi', 3), ('brigittemacron', 3), ('investissements', 3), ('permet', 3), ('combat', 3), ('batiment', 3), ('touristique', 3), ('destination', 3), ('derape', 3), ('anneaux', 3), ('henri', 3), ('ls', 3), ('antilles', 3), ('quelque', 3), ('congres', 3), ('aurai', 3), ('environnementale', 3), ('extraordinaire', 3), ('regions', 3), ('delegations', 3), ('internationales', 3), ('musique', 3), ('collectif', 3), ('esperant', 3), ('2000', 3), ('felicitation', 3), ('rh', 3), ('habitude', 3), ('accessibilite', 3), ('battre', 3), ('particuliere', 3), ('d1', 3), 
('gratuite', 3), ('marques', 3), ('selfie', 3), ('finance', 3), ('politiciens', 3), ('prive', 3), ('70', 3), ('promo', 3), ('haute', 3), ('professionnel', 3), ('ensuite', 3), ('genre', 3), ('golf', 3), ('remettre', 3), ('pau', 3), ('droite', 3), ('fais', 3), ('parcdesprinces', 3), ('test', 3), ('event', 3), ('emission', 3), ('claudeonesta', 3), ('jolie', 3), ('recompense', 3), ('concours', 3), ('animation', 3), ('dehors', 3), ('fallu', 3), ('lettre', 3), ('circulation', 3), ('etonnes', 3), ('offrir', 3), ('patrimoine', 3), ('coca', 3), ('annoncer', 3), ('clubs', 3), ('themes', 3), ('evoque', 3), ('officiellement', 3), ('competitions', 3), ('fetedelhuma', 3), ('historique', 3), ('devenus', 3), ('bd', 3), ('gagnante', 3), ('bach', 3), ('visiter', 3), ('internationaux', 3), ('chansons', 3), ('retirer', 3), ('ollier', 3), ('remercier', 3), ('arguments', 3), ('valeur', 3), ('maximum', 3), ('max', 3), ('polemiques', 3), ('carton', 3), ('pretexte', 3), ('delire', 3), ('audencia', 3), ('baule', 3), ('transforme', 3), ('croyais', 3), ('chloe', 3), ('avantage', 3), ('aubaine', 3), ('imaginer', 3), ('eurovision', 3), ('choisir', 3), ('booster', 3), ('mdr', 3), ('saluer', 3), ('95', 3), ('dynamique', 3), ('mobilisees', 3), ('inscrire', 3), ('pitie', 3), ('vrais', 3), ('rigueur', 3), ('fans', 3), ('sympathique', 3), ('venu', 3), ('elite', 3), ('francoishollande', 3), ('version', 3), ('imaginez', 3), ('auraient', 3), ('aurais', 3), ('trio', 3), ('exceptionnelle', 3), ('prestation', 3), ('finances', 3), ('sheila', 3), ('lumiere', 3), ('passage', 3), ('helenefr', 3), ('communion', 3), ('gerer', 3), ('cirque', 3), ('christophe', 3), ('nomination', 3), ('couvrir', 3), ('imagine', 3), ('diffuser', 3), ('sorte', 3), ('chacun', 3), ('zimbalist', 3), ('circus', 3), ('maximus', 3), ('benef', 3), ('acceleration', 3), ('lhotel', 3), ('auront', 3), ('annoncees', 3), ('putain', 3), ('attaque', 3), ('devenu', 3), ('liesse', 3), ('montre', 3), ('voix', 3), ('regarder', 3), ('voies', 3), ('ch', 3), ('magazine', 3), ('pavade', 3), ('urbaine', 3), ('presidence', 3), ('voulait', 3), ('lancee', 3), ('scol', 3), ('drole', 3), ('pib', 3), ('dirait', 3), ('2e', 3), ('tremplin', 3), ('lutter', 3), ('horaires', 3), ('rentre', 3), ('45', 3), ('problemes', 3), ('organisera', 3), ('craindre', 3), ('ga', 3), ('cons', 3), ('beneficier', 3), ('acteur', 3), ('buffet', 3), ('necessaire', 3), ('compter', 3), ('volley', 3), ('essentiel', 3), ('blancs', 3), ('scolaire', 3), ('pme', 3), ('discours', 3), ('gvt', 3), ('cool', 3), ('perso', 3), ('quartiers', 3), ('terre', 3), ('recus', 3), ('supporters', 3), ('durant', 3), ('ex', 3), ('invitation', 3), ('ancienne', 3), ('rentrer', 3), ('billets', 3), ('objectifs', 3), ('of', 3), ('tenue', 3), ('vp', 3), ('media', 3), ('betonnage', 3), ('150', 3), ('recu', 3), ('metros', 3), ('maniere', 3), ('lobtention', 3), ('prochaines', 3), ('quid', 3), ('relais', 3), ('ceremoniedouverture', 3), ('bande', 3), ('clairement', 3), ('prenez', 3), ('jespere', 3), ('openingceremony', 3), ('mauvais', 3), ('barre', 3), ('cyclisme', 3), ('annule', 3), ('lt', 3), ('aout', 3), ('rend', 3), ('atouts', 3), ('jeux2024', 3), ('spectateurs', 3), ('desastreuse', 3), ('bresil', 3), ('entree', 3), ('dommage', 3), ('bonnes', 3), ('comptable', 3), ('resultats', 3), ('ruine', 3), ('cesar', 3), ('tard', 3), ('derogation', 3), ('proche', 3), ('representant', 3), ('resultat', 3), ('personnel', 3), ('escalade', 3), ('murs', 3), ('wifi', 3), ('totalement', 3), ('economiste', 3), ('arte', 3), ('influence', 3), ('savait', 3), 
('numerique', 3), ('chef', 3), ('initiative', 3), ('bresiliens', 3), ('tantale', 3), ('mort', 3), ('annehidalgo', 3), ('thevenoud', 3), ('patron', 3), ('potentiel', 3), ('rome2024', 3), ('portera', 3), ('demesure', 3), ('proposition', 3), ('bleu', 3), ('blanc', 3), ('inclusive', 3), ('implication', 3), ('defaite', 3), ('page', 3), ('hauteur', 3), ('uefa', 3), ('puisque', 3), ('proximite', 3), ('utilite', 3), ('allait', 3), ('min', 3), ('tech', 3), ('affaires', 3), ('decouvrez', 3), ('allezlesbleus', 3), ('symbole', 3), ('mondiaux', 3), ('lapasset', 3), ('diversite', 3), ('usagers', 3), ('ess', 3), ('investit', 3), ('socialmedia', 3), ('rooftop', 3), ('1e', 3), ('echanger', 3), ('bienvenue', 3), ('heureuse', 3), ('connectee', 3), ('rydercup', 3), ('convention', 3), ('blague', 3), ('joueurs', 3), ('handi', 3), ('federations', 3), ('accompagnement', 3), ('cloture', 3), ('wwf', 3), ('operationmontblanc', 3), ('relever', 3), ('grosse', 3), ('organisee', 3), ('protocole', 3), ('chaud', 3), ('face', 3), ('matinee', 3), ('territorial', 3), ('5000', 3), ('action', 3), ('mobilisent', 3), ('leducation', 3), ('largement', 3), ('suisse', 3), ('loin', 3), ('casseurs', 3), ('expouniverselle', 3), ('dangers', 3), ('entretien', 3), ('laccueil', 3), ('ameliorer', 3), ('tennis', 3), ('05', 3), ('env', 3), ('pied', 3), ('guide', 3), ('peser', 3), ('rythme', 3), ('parait', 3), ('recevoir', 3), ('flinguer', 3), ('voie', 3), ('vibrer', 3), ('restitution', 3), ('xpotfens', 3), ('bouger', 3), ('inquiete', 3), ('decide', 3), ('cata', 3), ('otage', 3), ('pis', 3), ('voulez', 3), ('publiques', 3), ('promesses', 3), ('sponsoring', 3), ('concept', 3), ('lesprit', 3), ('travailleurs', 3), ('installation', 3), ('faisant', 3), ('structures', 3), ('disponible', 3), ('prises', 3), ('vi', 3), ('monaco', 3), ('do', 3), ('passerelle', 3), ('demi', 3), ('electrique', 3), ('changement', 3), ('igf', 3), ('zac', 3), ('lrem', 3), ('ecolos', 3), ('maitriser', 3), ('respecte', 3), ('generale', 3), ('kfr', 3), ('supplementaire', 3), ('bref', 3), ('hopitaux', 3), ('addition', 3), ('traduction', 3), ('communique', 3), ('enjeuxemploi', 3), ('utilisera', 3), ('nomme', 3), ('impact2', 3), ('ronde', 3), ('statut', 3), ('metiers', 3), ('local', 3), ('aujourdhui', 3), ('thibault', 3), ('organisations', 3), ('egalite', 3), ('classes', 3), ('lhistoire', 3), ('respecter', 3), ('croire', 3), ('voyageur', 3), ('arbitrages', 3), ('seminaire', 3), ('journal', 3), ('viseur', 3), ('copie', 3), ('majeurs', 3), ('court', 3), ('toulousain', 3), ('preconise', 3), ('speculation', 3), ('publier', 3), ('rectificatif', 3), ('dg', 3), ('debattre', 3), ('recteur', 3), ('cnds', 3), ('directrice', 3), ('jeuxparalympiques', 3), ('infrastructure', 3), ('architecture', 3), ('economies', 3), ('benevole', 3), ('transitionenergetique', 3), ('dici', 3), ('journees', 3), ('lheritage', 3), ('dix', 3), ('deraper', 3), ('lyon', 3), ('domaine', 3), ('chefs', 3), ('places', 3), ('sincerite', 3), ('detriment', 3), ('pioche', 3), ('lhonneur', 3), ('hauts', 3), ('financieres', 3), ('croit', 3), ('adoption', 3), ('penche', 3), ('senat', 3), ('enorme', 3), ('locaux', 3), ('tendance', 3), ('nest', 3), ('charte', 3), ('bord', 3), ('denormandie', 3), ('assemblee', 3), ('grandir', 3), ('qua', 3), ('dautres', 3), ('systeme', 3), ('hermitageplaza', 3), ('hautes', 3), ('discussion', 3), ('u', 3), ('deroulera', 3), ('environnemental', 3), ('record', 3), ('acheter', 3), ('hackathon', 3), ('normes', 3), ('realisee', 3), ('arena', 3), ('fiers', 3), ('vehicules', 3), ('lalyceenne', 3), 
('envoi', 3), ('agresse', 3), ('loger', 3), ('salaires', 3), ('effort', 3), ('touchera', 3), ('velib', 3), ('accompagne', 3), ('remunerations', 3), ('hop', 3), ('bruts', 3), ('diner', 3), ('vente', 3), ('justice', 3), ('sonne', 3), ('peyongchang2018', 3), ('emmanuelmacron', 3), ('sujets', 3), ('imposer', 3), ('lois', 3), ('ainsi', 3), ('chauds', 3), ('revu', 3), ('portant', 3), ('progres', 3), ('sponsor', 3), ('chatelet', 3), ('aom', 3), ('presence', 3), ('pratiques', 3), ('prioritaires', 3), ('fermer', 3), ('outremer', 3), ('perspectives', 3), ('audiovisuel', 3), ('ffr', 3), ('leader', 3), ('d2s', 3), ('gymnases', 2), ('ladepeche', 2), ('conditionne', 2), ('principes', 2), ('french', 2), ('pharaoniques', 2), ('lent', 2), ('lejdd', 2), ('infos', 2), ('engagee', 2), ('decideurs', 2), ('skb', 2), ('cour', 2), ('liberes', 2), ('secu', 2), ('lepoint', 2), ('bataille', 2), ('amende', 2), ('quot', 2), ('breaking', 2), ('infoz', 2), ('condit', 2), ('reconnait', 2), ('craquer', 2), ('lu', 2), ('new', 2), ('post', 2), ('urgent', 2), ('youpi', 2), ('ben', 2), ('ecarte', 2), ('referendum', 2), ('blog', 2), ('0', 2), ('nettoyer', 2), ('propres', 2), ('verre', 2), ('vide', 2), ('frustration', 2), ('qques', 2), ('onu', 2), ('rire', 2), ('julien', 2), ('chinois', 2), ('squash', 2), ('6milliards', 2), ('sert', 2), ('secteurs', 2), ('reparlera', 2), ('crees', 2), ('profitera', 2), ('minorite', 2), ('augmenter', 2), ('venus', 2), ('diplomatie', 2), ('isback', 2), ('parfaitement', 2), ('maximiser', 2), ('artisan', 2), ('denoncer', 2), ('capitalistes', 2), ('net', 2), ('rejoignez', 2), ('ayant', 2), ('anestaps', 2), ('7ans', 2), ('pardon', 2), ('adversaire', 2), ('orbite', 2), ('vetustes', 2), ('stif', 2), ('interets', 2), ('futsal', 2), ('rappele', 2), ('truc', 2), ('releve', 2), ('offre', 2), ('chic', 2), ('forum', 2), ('clandestins', 2), ('peripherique', 2), ('danielle', 2), ('tousathletesdelaville', 2), ('hoteldeville', 2), ('informer', 2), ('ravis', 2), ('restez', 2), ('lile', 2), ('optimisme', 2), ('geometres', 2), ('mur', 2), ('taquet', 2), ('placee', 2), ('resistance', 2), ('banlieue', 2), ('dons', 2), ('36', 2), ('feu', 2), ('vert', 2), ('soutiens', 2), ('like', 2), ('games', 2), ('groupes', 2), ('beaujoire', 2), ('homologue', 2), ('orange', 2), ('projecteurs', 2), ('rera', 2), ('endormir', 2), ('legitime', 2), ('paralympics', 2), ('temporairement', 2), ('facade', 2), ('magasin', 2), ('teaser', 2), ('qvt', 2), ('restent', 2), ('repondre', 2), ('appels', 2), ('doffres', 2), ('hey', 2), ('copains', 2), ('milieu', 2), ('bosser', 2), ('capables', 2), ('circulaires', 2), ('occulter', 2), ('livingiswinning', 2), ('endettee', 2), ('champagne', 2), ('information', 2), ('dessert', 2), ('bashing', 2), ('etrangers', 2), ('materiel', 2), ('pieds', 2), ('missionne', 2), ('amateurs', 2), ('grosses', 2), ('attirer', 2), ('malmont', 2), ('valoriser', 2), ('mp', 2), ('entrer', 2), ('world', 2), ('gerard', 2), ('collomb', 2), ('slogan', 2), ('anglais', 2), ('rappeler', 2), ('unanimite', 2), ('hand', 2), ('emmerder', 2), ('dopes', 2), ('bites', 2), ('occuper', 2), ('gestes', 2), ('moquee', 2), ('studio', 2), ('plage', 2), ('excellents', 2), ('souvenirs', 2), ('educatif', 2), ('affiche', 2), ('voiles', 2), ('avaler', 2), ('pistes', 2), ('cyclables', 2), ('250', 2), ('airbnb', 2), ('lanina', 2), ('bol', 2), ('66minutes', 2), ('fiasco', 2), ('connu', 2), ('rejouir', 2), ('mensuels', 2), ('nonaux', 2), ('eternite', 2), ('perche', 2), ('visiblement', 2), ('termes', 2), ('jp', 2), ('du71', 2), ('optique', 2), ('elan', 2), 
('ammour', 2), ('selectionneur', 2), ('pareil', 2), ('amenages', 2), ('annonces', 2), ('souvenir', 2), ('reussir', 2), ('jeudi', 2), ('election', 2), ('bercy', 2), ('gymnastique', 2), ('hymne', 2), ('stand', 2), ('tc', 2), ('muer', 2), ('contrat', 2), ('deniers', 2), ('personnels', 2), ('depasseme', 2), ('positif', 2), ('annuler', 2), ('mesurer', 2), ('daesh', 2), ('concurrents', 2), ('quest', 2), ('quils', 2), ('legerement', 2), ('mensonges', 2), ('lerequisitoire', 2), ('permanente', 2), ('semi', 2), ('enmarche', 2), ('legrandrdv', 2), ('telle', 2), ('prouver', 2), ('loupbureau', 2), ('conseils', 2), ('offrons', 2), ('serions', 2), ('optimiste', 2), ('trialb', 2), ('generations', 2), ('volontaire', 2), ('simplement', 2), ('respirer', 2), ('paul', 2), ('interdit', 2), ('propagande', 2), ('decider', 2), ('barrieres', 2), ('droits', 2), ('tireront', 2), ('arretez', 2), ('gag', 2), ('peche', 2), ('gagnes', 2), ('difficiles', 2), ('polynesie', 2), ('incivilites', 2), ('oublie', 2), ('peter', 2), ('gestionnaires', 2), ('ose', 2), ('farouchement', 2), ('regler', 2), ('h', 2), ('probablement', 2), ('ecoute', 2), ('grincheux', 2), ('rarement', 2), ('recette', 2), ('marrer', 2), ('ravi', 2), ('cat', 2), ('rejouissent', 2), ('federe', 2), ('clhebdo', 2), ('ramer', 2), ('langage', 2), ('message', 2), ('explose', 2), ('levallois', 2), ('it', 2), ('etats', 2), ('sandro', 2), ('decouvre', 2), ('2014', 2), ('liampayne', 2), ('special', 2), ('invitee', 2), ('pleniere', 2), ('payne', 2), ('principaux', 2), ('marseillaise', 2), ('plateau', 2), ('gaver', 2), ('animateurs', 2), ('david', 2), ('immense', 2), ('evasionfiscale', 2), ('retrouve', 2), ('indecent', 2), ('bp', 2), ('garde', 2), ('faute', 2), ('esperons', 2), ('economiser', 2), ('ecologistes', 2), ('reserves', 2), ('chanson', 2), ('barth', 2), ('incroyable', 2), ('titres', 2), ('noir', 2), ('addictions', 2), ('etatsdamour', 2), ('single', 2), ('attribuer', 2), ('canape', 2), ('celebr', 2), ('habitan', 2), ('neoliberal', 2), ('anticorruption', 2), ('ru', 2), ('devront', 2), ('lengagement', 2), ('dinvestissements', 2), ('libre', 2), ('considerable', 2), ('gratuit', 2), ('usa', 2), ('rennes', 2), ('merde', 2), ('fan', 2), ('militaire', 2), ('anciens', 2), ('ecart', 2), ('budgets', 2), ('dette', 2), ('connais', 2), ('lumieres', 2), ('dieu', 2), ('desole', 2), ('lieux', 2), ('standards', 2), ('superflue', 2), ('wearethechampions', 2), ('profite', 2), ('recrue', 2), ('taekwondo', 2), ('berge', 2), ('dedans', 2), ('mec', 2), ('pronostique', 2), ('decernee', 2), ('claire', 2), ('coudray', 2), ('attribue', 2), ('terroriste', 2), ('cri', 2), ('voter', 2), ('disait', 2), ('depasser', 2), ('dettepublique', 2), ('contribue', 2), ('dir', 2), ('nez', 2), ('refugies', 2), ('chaine', 2), ('michel', 2), ('gerald', 2), ('favier', 2), ('jt', 2), ('manifs', 2), ('rendus', 2), ('mot', 2), ('qualifiera', 2), ('9h', 2), ('10h', 2), ('acces', 2), ('uns', 2), ('paieront', 2), ('dira', 2), ('arcenciel', 2), ('lr', 2), ('rare', 2), ('bras', 2), ('repond', 2), ('podium', 2), ('chirac', 2), ('dents', 2), ('2eme', 2), ('lits', 2), ('equation', 2), ('parfaite', 2), ('decathlon', 2), ('federateur', 2), ('frontieres', 2), ('viennent', 2), ('1ers', 2), ('alignes', 2), ('eloigne', 2), ('hotelier', 2), ('negocie', 2), ('20h55', 2), ('pouvez', 2), ('visibilite', 2), ('attendues', 2), ('importantes', 2), ('finir', 2), ('acceder', 2), ('jeune', 2), ('sarko', 2), ('inquietent', 2), ('fromage', 2), ('savez', 2), ('rejoint', 2), ('nou', 2), ('comm', 2), ('fetes', 2), ('feuille', 2), ('edito', 
2), ('consorts', 2), ('indecence', 2), ('aimerais', 2), ('psdt', 2), ('cessent', 2), ('irma', 2), ('element', 2), ('portee', 2), ('rapide', 2), ('mahe', 2), ('propos', 2), ('parfois', 2), ('tirer', 2), ('400', 2), ('precede', 2), ('conseille', 2), ('partir', 2), ('nathalie', 2), ('interieur', 2), ('mollky', 2), ('films', 2), ('stadium', 2), ('asso', 2), ('mate', 2), ('arrives', 2), ('bloquer', 2), ('aventure', 2), ('orientation', 2), ('malheureusement', 2), ('accessible', 2), ('patrick', 2), ('evitera', 2), ('johnny', 2), ('ceremonies', 2), ('regardent', 2), ('braderie', 2), ('inspire', 2), ('cite', 2), ('equilibre', 2), ('decalage', 2), ('horaire', 2), ('3e', 2), ('annulation', 2), ('insecurite', 2), ('130', 2), ('acquis', 2), ('tombe', 2), ('aubry', 2), ('rayonner', 2), ('nona', 2), ('afficher', 2), ('centrale', 2), ('11md', 2), ('3md', 2), ('invest', 2), ('eve', 2), ('vaut', 2), ('exces', 2), ('ira', 2), ('proteger', 2), ('agonie', 2), ('olympicgames', 2), ('espaces', 2), ('incroyables', 2), ('inspirent', 2), ('croyons', 2), ('muriel', 2), ('douverture', 2), ('dettes', 2), ('echec', 2), ('unite', 2), ('incapables', 2), ('votons', 2), ('contribuable', 2), ('uvre', 2), ('uvres', 2), ('nationaux', 2), ('recherche', 2), ('source', 2), ('depit', 2), ('120', 2), ('1998', 2), ('tarif', 2), ('hd', 2), ('divers', 2), ('merkel', 2), ('etranger', 2), ('voyage', 2), ('lamour', 2), ('decrypte', 2), ('negociations', 2), ('ruiner', 2), ('fh', 2), ('ilove', 2), ('retrait', 2), ('condition', 2), ('gabegie', 2), ('spectacle', 2), ('ephemere', 2), ('coin', 2), ('lecture', 2), ('eventuel', 2), ('priorites', 2), ('derniers', 2), ('svp', 2), ('chere', 2), ('budapest', 2), ('precis', 2), ('connaitre', 2), ('395', 2), ('accessibles', 2), ('bmx', 2), ('decidement', 2), ('favori', 2), ('cartes', 2), ('candidates', 2), ('la2024', 2), ('budapest2024', 2), ('ordre', 2), ('centenaire', 2), ('elephants', 2), ('gayant', 2), ('2004', 2), ('pascal', 2), ('confortable', 2), ('contexte', 2), ('integrale', 2), ('evenementiel', 2), ('cle', 2), ('pedagogique', 2), ('losangeles2024', 2), ('poser', 2), ('convaincre', 2), ('ecrire', 2), ('pubs', 2), ('sympa', 2), ('para', 2), ('blueroom', 2), ('askestanguet', 2), ('paille', 2), ('apporte', 2), ('concurrence', 2), ('cap', 2), ('principal', 2), ('creatif', 2), ('dames', 2), ('positionne', 2), ('irmes', 2), ('fils', 2), ('film', 2), ('expriment', 2), ('14juillet2016', 2), ('bastilleday', 2), ('inedit', 2), ('algoe', 2), ('marchespublics', 2), ('baigner', 2), ('cliquez', 2), ('audace', 2), ('capacite', 2), ('ouverts', 2), ('inclusif', 2), ('aimerai', 2), ('pont', 2), ('suivi', 2), ('reflexion', 2), ('symboles', 2), ('participent', 2), ('val', 2), ('travaillent', 2), ('prise', 2), ('lci', 2), ('insiste', 2), ('damien', 2), ('givelet', 2), ('lefilm', 2), ('levenement', 2), ('sondage', 2), ('nageurs', 2), ('pouvait', 2), ('euro2016final', 2), ('frapor', 2), ('fiersdetrebleus', 2), ('porfra', 2), ('ethique', 2), ('etienne', 2), ('mobilisatrice', 2), ('gds', 2), ('confond', 2), ('fraall', 2), ('congresmev', 2), ('eventprofs', 2), ('cdgexpress', 2), ('plateforme', 2), ('dynamiser', 2), ('partageons', 2), ('judiciaire', 2), ('forts', 2), ('controle', 2), ('derive', 2), ('souligne', 2), ('linteret', 2), ('tian', 2), ('contraire', 2), ('produit', 2), ('solide', 2), ('methode', 2), ('dev', 2), ('repondu', 2), ('aupres', 2), ('kanner', 2), ('nautiques', 2), ('tpms', 2), ('energetique', 2), ('valide', 2), ('elysees', 2), ('juillet', 2), ('mer', 2), ('a380', 2), ('soutiennent', 2), ('candid', 
2), ('exclusivite', 2), ('repetitions', 2), ('valeyre', 2), ('dimanche', 2), ('developpementdurable', 2), ('votez', 2), ('pretes', 2), ('openinnovation', 2), ('paradoxe', 2), ('universite', 2), ('openinno', 2), ('trottoirs', 2), ('ments', 2), ('techniques', 2), ('signature', 2), ('ecoliers', 2), ('formule', 2), ('800', 2), ('yves', 2), ('manoir', 2), ('regional', 2), ('nager', 2), ('porteur', 2), ('placer', 2), ('discriminations', 2), ('dinvestissement', 2), ('departementaux', 2), ('emis', 2), ('prefere', 2), ('represente', 2), ('raggi', 2), ('aurons', 2), ('representee', 2), ('cannes', 2), ('autographes', 2), ('ci', 2), ('ufs', 2), ('derouler', 2), ('mobilisez', 2), ('leuro', 2), ('biz', 2), ('tonyestanguet', 2), ('contributions', 2), ('ressortir', 2), ('courts', 2), ('ggrmc', 2), ('postale', 2), ('envoyez', 2), ('quentinois', 2), ('impatience', 2), ('pensez', 2), ('jij2016', 2), ('singapour', 2), ('continuer', 2), ('laquelle', 2), ('ethics', 2), ('serresauteuil', 2), ('bizarrement', 2), ('incidents', 2), ('poursuit', 2), ('tache', 2), ('nettoyage', 2), ('antifa', 2), ('police', 2), ('cran', 2), ('engrus', 2), ('exemplaire', 2), ('rendu', 2), ('definitivement', 2), ('colombes', 2), ('barcamp', 2), ('massacre', 2), ('serresdauteuil', 2), ('brainstorming', 2), ('ar', 2), ('sachez', 2), ('reglement', 2), ('innovateurs', 2), ('bjr', 2), ('epargner', 2), ('sociales', 2), ('ordures', 2), ('marre', 2), ('apparemment', 2), ('aboutir', 2), ('ta', 2), ('tresor', 2), ('makunda', 2), ('organisent', 2), ('inutile', 2), ('eboueurs', 2), ('jeuxolympique', 2), ('servi', 2), ('actes', 2), ('tissu', 2), ('pole', 2), ('athle', 2), ('lvmh', 2), ('hashtag', 2), ('respectee', 2), ('aient', 2), ('engie', 2), ('centreaquatique', 2), ('rapports', 2), ('eventuels', 2), ('piscines', 2), ('relativement', 2), ('restreint', 2), ('menace', 2), ('municipales', 2), ('entrainer', 2), ('institutionnels', 2), ('souffle', 2), ('lyceens', 2), ('lyceennes', 2), ('34', 2), ('demolir', 2), ('parking', 2), ('foyer', 2), ('conclusions', 2), ('saintouen', 2), ('japon', 2), ('spo', 2), ('2h', 2), ('exemples', 2), ('daccueillir', 2), ('rabais', 2), ('conneries', 2), ('impacts', 2), ('atteindre', 2), ('donnent', 2), ('arcu', 2), ('decisions', 2), ('devraient', 2), ('av', 2), ('protection', 2), ('inquietant', 2), ('tot', 2), ('depassements', 2), ('doutes', 2), ('television', 2), ('geante', 2), ('dimaria', 2), ('neymar', 2), ('nojo', 2), ('enfouissement', 2), ('scolaires', 2), ('echelle', 2), ('exige', 2), ('masse', 2), ('saulnier', 2), ('ceci', 2), ('briller', 2), ('letat', 2), ('mettent', 2), ('utiliser', 2), ('rattrapage', 2), ('jodd', 2), ('disais', 2), ('seraient', 2), ('vendu', 2), ('defendon', 2), ('evaluent', 2), ('doubler', 2), ('pop', 2), ('specialiste', 2), ('million', 2), ('comptes', 2), ('connait', 2), ('envoye', 2), ('caprice', 2), ('ambassadeurs', 2), ('abri', 2), ('ambitieux', 2), ('evalue', 2), ('facon', 2), ('avertissement', 2), ('500m', 2), ('details', 2), ('olym', 2), ('final', 2), ('normal', 2), ('certaines', 2), ('majeures', 2), ('etonnant', 2), ('pere', 2), ('large', 2), ('pl', 2), ('leymarie', 2), ('critique', 2), ('bar', 2), ('salee', 2), ('arrivent', 2), ('dinfrastructures', 2), ('enfumage', 2), ('monter', 2), ('largent', 2), ('fonction', 2), ('territoriale', 2), ('engagees', 2), ('meet', 2), ('pleyel', 2), ('janvier', 2), ('exigences', 2), ('signataire', 2), ('danger', 2), ('tient', 2), ('jen', 2), ('soi', 2), ('disciplines', 2), ('integrer', 2), ('plastif', 2), ('courage', 2), ('tes', 2), ('lucas', 2), 
('planet', 2), ('citizens', 2), ('language', 2), ('training', 2), ('developper', 2), ('competences', 2), ('laboratoire', 2), ('anticiper', 2), ('serein', 2), ('favoriser', 2), ('detection', 2), ('maintient', 2), ('collegues', 2), ('exclusivites', 2), ('comptez', 2), ('fiscalite', 2), ('machine', 2), ('cafe', 2), ('menu', 2), ('administration', 2), ('6ans', 2), ('apporter', 2), ('transcender', 2), ('forte', 2), ('attente', 2), ('delicats', 2), ('see', 2), ('guyforget', 2), ('publication', 2), ('tou', 2), ('letude', 2), ('riche', 2), ('marchand', 2), ('immobiliere', 2), ('devait', 2), ('personnellement', 2), ('preparatifs', 2), ('croyez', 2), ('mahiedine', 2), ('mekhissi', 2), ('10km', 2), ('dossiers', 2), ('najdovski', 2), ('taxi', 2), ('longue', 2), ('revient', 2), ('lancons', 2), ('gagnera', 2), ('edf', 2), ('renforce', 2), ('hautesaone', 2), ('mvt', 2), ('orientations', 2), ('cavecchi', 2), ('christin', 2), ('202', 2), ('revez', 2), ('collectifs', 2), ('profs', 2), ('sein', 2), ('morning', 2), ('meeting', 2), ('comble', 2), ('cache', 2), ('citizen', 2), ('attractive', 2), ('uniquement', 2), ('parite', 2), ('polemique', 2), ('veulent', 2), ('management', 2), ('anna', 2), ('operer', 2), ('detablissement', 2), ('garder', 2), ('hugo', 2), ('jy', 2), ('renforcer', 2), ('partenariats', 2), ('lien', 2), ('bayle', 2), ('arras', 2), ('temoignage', 2), ('petition', 2), ('college', 2), ('4mds', 2), ('seulement', 2), ('ep', 2), ('rubrique', 2), ('19h', 2), ('beaute', 2), ('revele', 2), ('eclatant', 2), ('regioncentrevaldeloire', 2), ('demotions', 2), ('insincere', 2), ('pression', 2), ('boucle', 2), ('armand', 2), ('rendinger', 2), ('inities', 2), ('sest', 2), ('pontoise', 2), ('ultramoderne', 2), ('din', 2), ('priori', 2), ('categorie', 2), ('silence', 2), ('realiste', 2), ('ti', 2), ('preuve', 2), ('promoteur', 2), ('russe', 2), ('jumelles', 2), ('plaza', 2), ('appartements', 2), ('attendus', 2), ('premieres', 2), ('previsible', 2), ('evoluer', 2), ('story', 2), ('insoumise', 2), ('soutenu', 2), ('autrement', 2), ('dem', 2), ('regles', 2), ('expert', 2), ('organisat', 2), ('particulierement', 2), ('low', 2), ('rassurante', 2), ('pjl', 2), ('japonais', 2), ('chambres', 2), ('inquietude', 2), ('jaime', 2), ('raconte', 2), ('importe', 2), ('ice', 2), ('populations', 2), ('egalitefh', 2), ('gonfle', 2), ('directan', 2), ('visage', 2), ('adopte', 2), ('absolument', 2), ('juin', 2), ('livrer', 2), ('dhabitude', 2), ('u17', 2), ('convoque', 2), ('u18', 2), ('retards', 2), ('clair', 2), ('livre', 2), ('jure', 2), ('ait', 2), ('reelue', 2), ('dexperts', 2), ('lamentable', 2), ('rentable', 2), ('frais', 2), ('presidee', 2), ('explique', 2), ('startings', 2), ('blocks', 2), ('declasser', 2), ('recettes', 2), ('organisateur', 2), ('charges', 2), ('annuels', 2), ('recevra', 2), ('delegue', 2), ('interministeriel', 2), ('ecope', 2), ('folie', 2), ('market', 2), ('vis', 2), ('pratiquants', 2), ('equipement', 2), ('muhammad', 2), ('yunus', 2), ('riches', 2), ('courrier', 2), ('passionnant', 2), ('sachant', 2), ('pbs', 2), ('valides', 2), ('open', 2), ('data', 2), ('occitanie', 2), ('gymnase', 2), ('digital', 2), ('bloch', 2), ('consulting', 2), ('climateaction', 2), ('dictature', 2), ('maifrun', 2), ('voulus', 2), ('lyceenne', 2), ('plie', 2), ('rejoindre', 2), ('femme', 2), ('assiste', 2), ('49152', 2), ('assises', 2), ('doubs', 2), ('comparait', 2), ('18h30', 2), ('compagnie', 2), ('humain', 2), ('ultrahd', 2), ('dvb', 2), ('t2', 2), ('hevc', 2), ('csa', 2), ('pourtous', 2), ('48832', 2), ('bourg', 2), 
('bresse', 2), ('savethedate', 2), ('hiver', 2), ('orient', 2), ('amman', 2), ('figures', 2), ('syndicales', 2), ('lacourneuve', 2), ('nefastes', 2), ('savo', 2), ('renaud', 2), ('delavillenie', 2), ('metres', 2), ('adequat', 2), ('choquant', 2), ('ceo', 2), ('disney', 2), ('disneyland', 2), ('poids', 2), ('daccord', 2), ('kilometres', 2), ('caen', 2), ('sia2018', 2), ('mutualiser', 2), ('moyen', 2), ('remuneration', 2), ('interessante', 2), ('carriere', 2), ('pourriez', 2), ('eenice2018', 2), ('bschool', 2), ('actrice', 2), ('cr', 2), ('cycle', 2), ('adn', 2), ('servir', 2), ('ticket', 2), ('clef', 2), ('maritime', 2), ('feront', 2), ('nature', 2), ('aide', 2), ('enfer', 2), ('villetaneuse', 2), ('georges', 2), ('reclame', 2), ('paroles', 2), ('experimentation', 2), ('is', 2), ('ambitions', 2), ('melenchon', 2), ('op', 2), ('universitaire', 2), ('instantane', 2), ('temporaire', 2), ('maintenantonagit', 2), ('dois', 2), ('avouer', 2), ('vingt', 2), ('interessant', 2), ('mascottes', 2), ('aborder', 2), ('dessus', 2), ('multinationales', 2), ('entretiendra', 2), ('accu', 2), ('terminal', 2), ('aeroport', 2), ('gaulle', 2), ('cruciale', 2), ('deplacement', 2), ('tricolores', 2), ('kitesurf', 2), ('pen', 2), ('asie', 2), ('manger', 2), ('donnee', 2), ('prioritaire', 2), ('coree', 2), ('relatif', 2), ('melanie', 2), ('dhaiby', 2), ('accompagnee', 2), ('operationnelles', 2), ('gare', 2), ('maintenue', 2), ('ligne718', 2), ('programmee', 2), ('marque', 2), ('bouees', 2), ('putains', 2), ('pannes', 2), ('velo', 2), ('ligner', 2), ('correspondance', 2), ('pomo', 2), ('ratee', 2), ('melun', 2), ('pressadom', 2), ('historiques', 2), ('maintenance', 2), ('horizon', 2), ('philippe', 2), ('immercuriens', 2), ('rolland', 2), ('300', 2), ('retransmission', 2), ('pensons', 2), ('4x10km', 2), ('pyongchang2018', 2), ('autonomes', 2), ('cup', 2), ('lachaise', 2), ('x2', 2), ('rendue', 2), ('poursuivre', 2), ('associations', 2), ('personnalite', 2), ('couine', 2), ('renove', 2), ('mai', 2), ('baisse', 2), ('patienter', 2), ('letsgo', 2), ('revie', 2), ('potentielles', 2), ('droitstv', 2), ('audiovisuels', 2), ('villageolympique', 2), ('discordantes', 2), ('phase', 2), ('saclay', 2), ('starcraft', 2), ('ii', 2), ('nation', 2), ('habitat', 2), ('sagit', 2), ('fontainebleau', 2), ('occasions', 2), ('pass', 2), ('worldcup', 2), ('levenementiel', 2), ('2023rugbyworldcup', 2), ('gala', 2), ('mondial2023', 2), ('moments', 2), ('plane', 2), ('rydercup2018', 2), ('rester', 2), ('priorit', 2), ('international', 2), ('semainelfm', 2), ('transparence', 2), ('amos', 2), ('client', 2), ('reconversion', 2), ('debats', 2), ('rtbf', 1), ('constat', 1), ('lucide', 1), ('fondamentaux', 1), ('boston2024', 1), ('logic', 1), ('retro', 1), ('commissions', 1), ('inutiles', 1), ('dtn', 1), ('bientat', 1), ('metronews', 1), ('confirmation', 1), ('jugeront', 1), ('ct', 1), ('girouette', 1), ('toulouse', 1), ('rsi', 1), ('arret', 1), ('limoges', 1), ('ectac', 1), ('prononcer', 1), ('calme', 1), ('siege', 1), ('ejectable', 1), ('rapproche', 1), ('ligue1', 1), ('reuters', 1), ('read', 1), ('your', 1), ('here', 1), ('hidalgoa', 1), ('rapporter', 1), ('chipster', 1), ('montpellier', 1), ('municipaux', 1), ('voe', 1), ('ple', 1), ('tw974', 1), ('perdent', 1), ('pourqupi', 1), ('actus', 1), ('kevinfrezhx3', 1), ('ann', 1), ('20minutes', 1), ('socialiste', 1), ('inutilement', 1), ('appli', 1), ('ver', 1), ('reveolympique', 1), ('wtfdj', 1), ('today', 1), ('lepopulaire', 1), ('perte', 1), ('pourcentage', 1), ('dhabitants', 1), ('daugmenter', 1), 
('52', 1), ('marc', 1), ('morandini', 1), ('pc', 1), ('front', 1), ('allemand', 1), ('ideologie', 1), ('alberville', 1), ('cer', 1), ('rapporteront', 1), ('concretement', 1), ('adeptes', 1), ('frenchbashing', 1), ('eclaircissements', 1), ('entrainait', 1), ('villededemain', 1), ('indien', 1), ('myeur', 1), ('g5sahel', 1), ('montant', 1), ('germaine', 1), ('psgol', 1), ('eurobasket2017', 1), ('embaucheront', 1), ('reparler', 1), ('interressant', 1), ('chantage', 1), ('chelem', 1), ('usager', 1), ('fourni', 1), ('parois', 1), ('effel', 1), ('fn', 1), ('plafond', 1), ('epoque', 1), ('ajouter', 1), ('palet', 1), ('etiez', 1), ('veiller', 1), ('nuitdurugby', 1), ('pilotee', 1), ('vict', 1), ('971', 1), ('exprime', 1), ('pourriture', 1), ('pakman', 1), ('quora', 1), ('contributeurs', 1), ('partagent', 1), ('visions', 1), ('mtn', 1), ('stapsenperil', 1), ('defaillante', 1), ('arrivons', 1), ('articulation', 1), ('forma', 1), ('croix', 1), ('berny', 1), ('accordent', 1), ('pouvoirs', 1), ('reversibilite', 1), ('nuance', 1), ('fracture', 1), ('hallu', 1), ('simple', 1), ('claquement', 1), ('mutation', 1), ('placera', 1), ('euro2022', 1), ('sabani', 1), ('colere', 1), ('ouaiiiiii', 1), ('boubou', 1), ('ouaiiiii', 1), ('redoute', 1), ('400m', 1), ('1mm', 1), ('halles', 1), ('tourtriangle', 1), ('auteuil', 1), ('mm', 1), ('arretera', 1), ('sieste', 1), ('mandature', 1), ('100m', 1), ('actuel', 1), ('refassent', 1), ('simonnet', 1), ('emporte', 1), ('pretextant', 1), ('vesperini', 1), ('lautre', 1), ('green', 1), ('elegante', 1), ('creative', 1), ('beh', 1), ('nego', 1), ('musclee', 1), ('mince', 1), ('principederealite', 1), ('li', 1), ('directidf', 1), ('therapie', 1), ('piqure', 1), ('creativite', 1), ('turbo', 1), ('manne', 1), ('construisait', 1), ('petites', 1), ('maisons', 1), ('pensant', 1), ('immo', 1), ('concu', 1), ('clichy', 1), ('posent', 1), ('gar', 1), ('simonet', 1), ('servaient', 1), ('servirait', 1), ('85000', 1), ('generes', 1), ('salons', 1), ('tourismeaffaires', 1), ('decor', 1), ('103', 1), ('519', 1), ('eng', 1), ('seniors', 1), ('ups', 1), ('innover', 1), ('smartgrids', 1), ('citoyenne', 1), ('coordination', 1), ('montrons', 1), ('innovons', 1), ('voiturage', 1), ('anniversaire', 1), ('mayenne', 1), ('tranquille', 1), ('jb', 1), ('camille', 1), ('presentes', 1), ('confier', 1), ('importance', 1), ('assurement', 1), ('continuons', 1), ('webdoc', 1), ('rugb', 1), ('touristiques', 1), ('attractifs', 1), ('lavagueott', 1), ('confnpa', 1), ('heroesfr', 1), ('wellness', 1), ('frequent', 1), ('enrichissent', 1), ('grisaille', 1), ('eveneme', 1), ('lenthousiasme', 1), ('unanimiste', 1), ('posees', 1), ('finie', 1), ('agissez', 1), ('now', 1), ('administres', 1), ('pajol', 1), ('chapelle', 1), ('marxdormoy', 1), ('tb', 1), ('back', 1), ('day', 1), ('insepiens', 1), ('activent', 1), ('bouseux', 1), ('metropolitaine', 1), ('paru', 1), ('pourboire', 1), ('laissez', 1), ('annoncait', 1), ('portes', 1), ('ceeso', 1), ('flatter', 1), ('ego', 1), ('specialite', 1), ('5k', 1), ('70k', 1), ('20k', 1), ('10m', 1), ('spectateu', 1), ('nageront', 1), ('poing', 1), ('10mds', 1), ('nonauxjo', 1), ('elixir', 1), ('preparatqu', 1), ('dispo', 1), ('ordinaire', 1), ('aeroports', 1), ('manifestement', 1), ('suscitent', 1), ('editorialistes', 1), ('anous', 1), ('83', 1), ('aberration', 1), ('boites', 1), ('corrompus', 1), ('zoologique', 1), ('sarajevo', 1), ('84', 1), ('after', 1), ('jeanne', 1), ('pekin', 1), ('installations', 1), ('construite', 1), ('munich', 1), ('1972', 1), ('vedette', 1), ('remplie', 
1), ('traversee', 1), ('portrait', 1), ('detendus', 1), ('cdenquete', 1), ('plaquetournante', 1), ('weed', 1), ('cannabis', 1), ('applaudisse', 1), ('soeur', 1), ('vegas', 1), ('facron', 1), ('gaspillage', 1), ('remportent', 1), ('endormi', 1), ('combattant', 1), ('excatement', 1), ('prevoit', 1), ('1slogan', 1), ('moliere', 1), ('bouchez', 1), ('accompli', 1), ('costauds', 1), ('fp', 1), ('commencons', 1), ('chomeurs', 1), ('pokora', 1), ('black', 1), ('actors', 1), ('digere', 1), ('cranes', 1), ('uf', 1), ('plebiscite', 1), ('traitresse', 1), ('asnelles', 1), ('culturel', 1), ('trouves', 1), ('rebeu', 1), ('sexy', 1), ('ltd', 1), ('pourvu', 1), ('canope', 1), ('crier', 1), ('mld', 1), ('tourism', 1), ('chevre', 1), ('mettez', 1), ('appartement', 1), ('trouvez', 1), ('bail', 1), ('dormir', 1), ('manquer', 1), ('seu', 1), ('ouverte', 1), ('fondateur', 1), ('decyclisme', 1), ('12mds', 1), ('delai', 1), ('verifie', 1), ('souci', 1), ('sales', 1), ('prions', 1), ('delinquants', 1), ('touches', 1), ('septahuit', 1), ('formidablement', 1), ('menee', 1), ('huitahuit', 1), ('tattoolete', 1), ('halle', 1), ('maigrot', 1), ('phares', 1), ('global', 1), ('demultiplier', 1), ('alloue', 1), ('nerf', 1), ('adidas', 1), ('nike', 1), ('ecosysteme', 1), ('lendemain', 1), ('sortis', 1), ('multicolores', 1), ('bannir', 1), ('racisme', 1), ('consciences', 1), ('fdh2017', 1), ('blonde', 1), ('selfierate', 1), ('pics', 1), ('montdemarsan', 1), ('reportage', 1), ('plusi', 1), ('whaoou', 1), ('gymnastics', 1), ('barnum', 1), ('vibre', 1), ('6nations', 1), ('arthur', 1), ('russie', 1), ('financera', 1), ('7mds', 1), ('fondation', 1), ('limpact', 1), ('rassemblee', 1), ('gouvt', 1), ('donnant', 1), ('frousse', 1), ('semmerder', 1), ('revisions', 1), ('gerbant', 1), ('claquer', 1), ('jesuisgrincheux', 1), ('abracadabrantesque', 1), ('contrer', 1), ('tels', 1), ('matheson', 1), ('szymanski', 1), ('prevenus', 1), ('pensance', 1), ('desesperance', 1), ('floute', 1), ('efface', 1), ('minori', 1), ('desinformation', 1), ('bases', 1), ('entrainements', 1), ('marathon', 1), ('abeillezvous', 1), ('coicault', 1), ('pnl', 1), ('rim', 1), ('damso', 1), ('booskapress', 1), ('masgrau', 1), ('rab', 1), ('illustre', 1), ('inconnu', 1), ('plat', 1), ('ventre', 1), ('massacres', 1), ('contraires', 1), ('entrainons', 1), ('souhaitais', 1), ('courir', 1), ('respire', 1), ('itw', 1), ('podiu', 1), ('recap', 1), ('levothyrox', 1), ('dataviz', 1), ('impraticable', 1), ('routes', 1), ('largue', 1), ('clients', 1), ('reformes', 1), ('alo', 1), ('transformee', 1), ('ardoise', 1), ('soudees', 1), ('faite', 1), ('avertit', 1), ('ingenierie', 1), ('populous', 1), ('approuves', 1), ('euphorique', 1), ('graines', 1), ('sincere', 1), ('oeuvre', 1), ('quadrilingue', 1), ('dimension', 1), ('inviter', 1), ('librement', 1), ('romera', 1), ('water', 1), ('salute', 1), ('cop21', 1), ('consigny', 1), ('former', 1), ('douane', 1), ('usagersidf', 1), ('construites', 1), ('concernent', 1), ('privilegies', 1), ('fipp', 1), ('federation', 1), ('reconnue', 1), ('ijspf', 1), ('pugilat', 1), ('reamenagement', 1), ('equitable', 1), ('paule', 1), ('curry', 1), ('couleur', 1), ('seduit', 1), ('commander', 1), ('polos', 1), ('exitchemises', 1), ('parioca', 1), ('fronsse', 1), ('moral', 1), ('impregner', 1), ('markterr', 1), ('lp', 1), ('lambert', 1), ('circuler', 1), ('gamerz', 1), ('jourdechance', 1), ('teamimpatient', 1), ('tiny', 1), ('thevoicekids', 1), ('chanterait', 1), ('raz', 1), ('memes', 1), ('nains', 1), ('110m', 1), ('haies', 1), ('blanche', 1), 
('momie', 1), ('simplet', 1), ('imprime', 1), ('elh', 1), ('oseille', 1), ('baladent', 1), ('jep', 1), ('endettement', 1), ('batteur', 1), ('nolwenn', 1), ('leroy', 1), ('gemme', 1), ('justifier', 1), ('ecarts', 1), ('castaner', 1), ('panem', 1), ('circenses', 1), ('demonstration', 1), ('mds', 1), ('2010', 1), ('astuce', 1), ('nony', 1), ('jeancome', 1), ('origine', 1), ('aulsc', 1), ('weare24', 1), ('courtois', 1), ('decors', 1), ('graphistes', 1), ('graphicdesign', 1), ('did', 1), ('jep2017', 1), ('archives', 1), ('alwayssmile', 1), ('centres', 1), ('epide', 1), ('pause', 1), ('overboard', 1), ('tente', 1), ('ranimer', 1), ('rediffusion', 1), ('chanteur', 1), ('drolement', 1), ('entouree', 1), ('cridf', 1), ('oscar', 1), ('goes', 1), ('kids', 1), ('united', 1), ('jsp', 1), ('maybe', 1), ('dma', 1), ('rfm', 1), ('peur', 1), ('dominer', 1), ('fatigue', 1), ('triompher', 1), ('difficulte', 1), ('vaincre', 1), ('parlons', 1), ('masses', 1), ('recon', 1), ('budgetise', 1), ('nicolassarkozy', 1), ('onmyway', 1), ('cloturer', 1), ('evenemen', 1), ('doucesoiree', 1), ('59', 1), ('25ans', 1), ('remplissage', 1), ('poches', 1), ('applaudir', 1), ('reviendra', 1), ('donnez', 1), ('tal', 1), ('maitre', 1), ('gims', 1), ('repousser', 1), ('aimerait', 1), ('reclamez', 1), ('mylene', 1), ('farmer', 1), ('richard', 1), ('orlinsk', 1), ('guetta', 1), ('retransmis', 1), ('liberalisme', 1), ('package', 1), ('loitravailxxl', 1), ('ceta', 1), ('manif21septembre', 1), ('deash', 1), ('revendique', 1), ('fout', 1), ('fcais', 1), ('parlez', 1), ('envole', 1), ('sort', 1), ('rails', 1), ('passait', 1), ('programmateurs', 1), ('gouts', 1), ('ralentit', 1), ('patrickfiori', 1), ('nolwennleroy', 1), ('chantait', 1), ('cliche', 1), ('prod', 1), ('monte', 1), ('congolais', 1), ('aligne', 1), ('inoubliabl', 1), ('emettent', 1), ('repose', 1), ('queen', 1), ('profonde', 1), ('larrogante', 1), ('pousser', 1), ('brizitte', 1), ('orties', 1), ('accuellir', 1), ('moindre', 1), ('cheri', 1), ('vies', 1), ('pauvrete', 1), ('triomphe', 1), ('terminees', 1), ('provinciaux', 1), ('dsl', 1), ('nan', 1), ('too', 1), ('much', 1), ('saouler', 1), ('rendrais', 1), ('tenter', 1), ('vecu', 1), ('alain', 1), ('ducasse', 1), ('agapes', 1), ('fourniture', 1), ('tables', 1), ('ortolans', 1), ('landais', 1), ('user', 1), ('denier', 1), ('incapable', 1), ('mamainacouper', 1), ('retrouver', 1), ('willem', 1), ('blues', 1), ('presonnes', 1), ('ecran', 1), ('marinakaye', 1), ('album', 1), ('office', 1), ('declencheur', 1), ('chanteurs', 1), ('adore', 1), ('whaou', 1), ('amir', 1), ('separent', 1), ('marteau', 1), ('festive', 1), ('free', 1), ('coubertin', 1), ('electron', 1), ('productions', 1), ('manuvre', 1), ('pourrons', 1), ('nantis', 1), ('victoires', 1), ('finissent', 1), ('petiti', 1), ('playback', 1), ('decale', 1), ('lartiste', 1), ('ba', 1), ('attendais', 1), ('ridicule', 1), ('habi', 1), ('nique', 1), ('budjet', 1), ('sentinelle', 1), ('banal', 1), ('triste', 1), ('mariejoforever', 1), ('predecesseurs', 1), ('impatient', 1), ('98', 1), ('banksters', 1), ('forcer', 1), ('commissi', 1), ('lagardere', 1), ('gueuler', 1), ('meufs', 1), ('pourri', 1), ('reparti', 1), ('presta', 1), ('sinistres', 1), ('judicieux', 1), ('taulier', 1), ('shows', 1), ('mathilde', 1), ('soulagement', 1), ('pile', 1), ('amateur', 1), ('prefabriquee', 1), ('degoute', 1), ('taille', 1), ('autoderision', 1), ('americains', 1), ('influentes', 1), ('tonalite', 1), ('honteuse', 1), ('etranges', 1), ('3minutes', 1), ('vtep', 1), ('foutais', 1), ('ptdrrrr', 1), 
('regarderais', 1), ('paname', 1), ('sui', 1), ('l2', 1), ('boycotte', 1), ('municipal', 1), ('jopara2024', 1), ('regardez', 1), ('mater', 1), ('2022', 1), ('bisous', 1), ('socle', 1), ('republicaines', 1), ('desir', 1), ('vivreensemble', 1), ('faineant', 1), ('absence', 1), ('j01', 1), ('20h00', 1), ('acte', 1), ('attaques', 1), ('passants', 1), ('allah', 1), ('akbar', 1), ('embellir', 1), ('arrondt', 1), ('parliez', 1), ('contents', 1), ('dorment', 1), ('opera', 1), ('6mds', 1), ('pagny', 1), ('filant', 1), ('ble', 1), ('choisis', 1), ('redaction', 1), ('boom', 1), ('depensons', 1), ('liberaliser', 1), ('soyez', 1), ('dupe', 1), ('fourbe', 1), ('conviee', 1), ('chequier', 1), ('caution', 1), ('recevait', 1), ('2012', 1), ('repartir', 1), ('fracasse', 1), ('ready24', 1), ('moderne', 1), ('rou', 1), ('numerik', 1), ('eloquent', 1), ('exhortant', 1), ('bat', 1), ('clivages', 1), ('californie', 1), ('liesses', 1), ('cimade', 1), ('douteux', 1), ('ortf', 1), ('deployer', 1), ('chauvine', 1), ('sedentarite', 1), ('cymes', 1), ('letarnec', 1), ('guyancourt', 1), ('lel', 1), ('ulysse', 1), ('chantera', 1), ('graaande', 1), ('excuse', 1), ('zermi', 1), ('instants', 1), ('mathoux', 1), ('portolano', 1), ('herve', 1), ('gentil', 1), ('decisif', 1), ('huchon', 1), ('precurseur', 1), ('aie', 1), ('cascad', 1), ('juncker', 1), ('tv5monde', 1), ('independante', 1), ('accred', 1), ('appareil', 1), ('phot', 1), ('accederont', 1), ('preferes', 1), ('surplus', 1), ('juriscup', 1), ('senti', 1), ('energique', 1), ('islife', 1), ('workout', 1), ('swim', 1), ('occulte', 1), ('chomage', 1), ('combats', 1), ('bleues', 1), ('qualifies', 1), ('ravie', 1), ('honteusement', 1), ('crade', 1), ('monty', 1), ('python', 1), ('boite', 1), ('30km', 1), ('accueillis', 1), ('crepsidf', 1), ('magique', 1), ('lelysee', 1), ('nvl', 1), ('galere', 1), ('suer', 1), ('us', 1), ('pogrom', 1), ('heu', 1), ('receptions', 1), ('mousser', 1), ('daltons', 1), ('baver', 1), ('fran', 1), ('feedback', 1), ('journeesdupatrimoine', 1), ('declin', 1), ('gargarise', 1), ('reussie', 1), ('emerveille', 1), ('digne', 1), ('intelligent', 1), ('alecole', 1), ('weekend', 1), ('terrasse', 1), ('rater', 1), ('tombent', 1), ('differences', 1), ('disparaissen', 1), ('rang', 1), ('lopportunite', 1), ('dorganiser', 1), ('accordde', 1), ('madrid', 1), ('2032', 1), ('chasse', 1), ('poil', 1), ('bienvenuealhotel', 1), ('boy', 1), ('la2028', 1), ('rois', 1), ('brochette', 1), ('manq', 1), ('acceptent', 1), ('mg', 1), ('agora', 1), ('plaiz', 1), ('best', 1), ('buddies', 1), ('intelligente', 1), ('sacrifices', 1), ('surmonter', 1), ('bagmontana', 1), ('visuels', 1), ('facebook', 1), ('throwback', 1), ('ovpl', 1), ('dessine', 1), ('peripherie', 1), ('effervescence', 1), ('seda', 1), ('pleu', 1), ('adopterons', 1), ('operationnel', 1), ('efficace', 1), ('francilen', 1), ('preparez', 1), ('postuler', 1), ('liee', 1), ('testosterone', 1), ('couleu', 1), ('dhafer', 1), ('diars', 1), ('kelb', 1), ('auto', 1), ('rectrice', 1), ('meck', 1), ('mine', 1), ('dor', 1), ('ellysee', 1), ('bonplanmaraude', 1), ('6e', 1), ('seras', 1), ('teamunss', 1), ('architecte', 1), ('cpena', 1), ('reactions', 1), ('descente', 1), ('fetons', 1), ('enormement', 1), ('gagnion', 1), ('nicol', 1), ('repost', 1), ('remarquable', 1), ('simagrees', 1), ('reseaux', 1), ('fatiguent', 1), ('lache', 1), ('koolantha', 1), ('remerciement', 1), ('silicon', 1), ('valley', 1), ('mehb', 1), ('snober', 1), ('mepriser', 1), ('reagit', 1), ('choque', 1), ('dan', 1), ('prends', 1), ('matins', 1), 
('remerciements', 1), ('cadres', 1), ('ouvriers', 1), ('aises', 1), ('fausse', 1), ('lavis', 1), ('feliciter', 1), ('nabot', 1), ('sourire', 1), ('educative', 1), ('matelas', 1), ('reussissent', 1), ('reconcilier', 1), ('adoraaaable', 1), ('ptits', 1), ('drapeaux', 1), ('18eme', 1), ('dieppe', 1), ('thread', 1), ('sexisme', 1), ('mixtes', 1), ('foret', 1), ('ecrans', 1), ('filmer', 1), ('applaudissements', 1), ('accomplie', 1), ('lea', 1), ('gauthier', 1), ('annaelle', 1), ('benjamin', 1), ('convivialite', 1), ('saluent', 1), ('hesites', 1), ('kohlanta', 1), ('soireeindecise', 1), ('mobilisa', 1), ('conseillere', 1), ('iannetta', 1), ('retrouvailles', 1), ('salut', 1), ('dechet', 1), ('renouvelables', 1), ('servait', 1), ('love', 1), ('munir', 1), ('datant', 1), ('investissent', 1), ('siaap', 1), ('quinon', 1), ('btw', 1), ('chapeau', 1), ('zevent', 1), ('you', 1), ('rock', 1), ('viewerdelombre', 1), ('surs', 1), ('disleaanne', 1), ('more2024', 1), ('millenials', 1), ('waiona', 1), ('billard', 1), ('dessin', 1), ('plantu', 1), ('cartoon', 1), ('rem', 1), ('inondable', 1), ('democratique', 1), ('decoree', 1), ('accredites', 1), ('incitation', 1), ('licence', 1), ('what', 1), ('else', 1), ('percevoir', 1), ('championnes', 1), ('crains', 1), ('reduite', 1), ('why', 1), ('accepterait', 1), ('marraine', 1), ('foiredecaen', 1), ('christinearron', 1), ('impliquee', 1), ('fecavolley', 1), ('fiori', 1), ('gache', 1), ('maintentant', 1), ('alberti', 1), ('malheureux', 1), ('voulant', 1), ('dintegration', 1), ('risquent', 1), ('flasher', 1), ('verbaliser', 1), ('splendide', 1), ('alerter', 1), ('sebastien', 1), ('sardines', 1), ('lola', 1), ('loupe', 1), ('bouse', 1), ('vache', 1), ('selectionnes', 1), ('liza', 1), ('monet', 1), ('statue', 1), ('liberte', 1), ('christ', 1), ('redempteur', 1), ('pcqon', 1), ('flemmards', 1), ('faim', 1), ('daft', 1), ('punk', 1), ('martel', 1), ('occupera', 1), ('choregraphe', 1), ('interpretee', 1), ('hallyday', 1), ('twirling', 1), ('braderielille', 1), ('cours', 1), ('observer', 1), ('unitenationale', 1), ('notres', 1), ('plaidant', 1), ('lacause', 1), ('icanmakeitalone', 1), ('annulee', 1), ('mettra', 1), ('pourris', 1), ('chiantesque', 1), ('moipresident', 1), ('trace', 1), ('dejeuner', 1), ('offert', 1), ('bas', 1), ('pretendre', 1), ('chine', 1), ('torpiller', 1), ('pleins', 1), ('rafale', 1), ('fh2017', 1), ('cortege', 1), ('deadline', 1), ('decalee', 1), ('engraisse', 1), ('percent', 1), ('cheris', 1), ('prie', 1), ('pouffer', 1), ('overdose', 1), ('popularite', 1), ('synthese', 1), ('reaffirme', 1), ('francis', 1), ('didier', 1), ('nanjedeconne', 1), ('rigoureusement', 1), ('remporter', 1), ('guignol', 1), ('annules', 1), ('cdanslair', 1), ('grds', 1), ('toile', 1), ('bis', 1), ('arreter', 1), ('toiaussiannuleuntruc', 1), ('ravva', 1), ('cccp', 1), ('pretention', 1), ('endetter', 1), ('dizaines', 1), ('asphyxie', 1), ('plaisirs', 1), ('directpr', 1), ('garantie', 1), ('rassembles', 1), ('porteuse', 1), ('proposons', 1), ('echangeons', 1), ('progresser', 1), ('harmonie', 1), ('devoiler', 1), ('resolument', 1), ('aille', 1), ('garantit', 1), ('grotesque', 1), ('iocgohome', 1), ('olympics2016', 1), ('relevez', 1), ('descendre', 1), ('exclusion', 1), ('trichent', 1), ('ferait', 1), ('militons', 1), ('socialement', 1), ('culbuto', 1), ('zep', 1), ('entendu', 1), ('redepense', 1), ('poignee', 1), ('renseignement', 1), ('prier', 1), ('obtienne', 1), ('dingue', 1), ('qq', 1), ('maracana', 1), ('brille', 1), ('scandalise', 1), ('vendue', 1), ('encheres', 1), 
('ventes', 1), ('voyez', 1), ('artiste', 1), ('americain', 1), ('cotations', 1), ('pinault', 1), ('madoff', 1), ('esthetique', 1), ('glamour', 1), ('proprement', 1), ('suscite', 1), ('2passion', 1), ('ferveur', 1), ('drames', 1), ('regard', 1), ('1compte', 1), ('escomptant', 1), ('societes', 1), ('anxieux', 1), ('bouc', 1), ('emissaire', 1), ('permanent', 1), ('2plus', 1), ('scandaleux', 1), ('2constater', 1), ('richesses', 1), ('detenus', 1), ('2la', 1), ('emouvoir', 1), ('effacer', 1), ('souf', 1), ('tps', 1), ('biais', 1), ('legiondhonneur', 1), ('attribuee', 1), ('polo', 1), ('smic', 1), ('97', 1), ('dumont', 1), ('paie', 1), ('informe', 1), ('taraude', 1), ('paulo', 1), ('footballeur', 1), ('aurez', 1), ('justement', 1), ('94', 1), ('judicieuse', 1), ('gachis', 1), ('besoins', 1), ('vitaux', 1), ('faits', 1), ('170', 1), ('secret', 1), ('boycott', 1), ('cinq', 1), ('gaspiller', 1), ('rattrape', 1), ('obtiens', 1), ('stp', 1), ('penultinien', 1), ('flamby', 1), ('illico', 1), ('envahisse', 1), ('sape', 1), ('votera', 1), ('kinshasa', 1), ('ecouter', 1), ('manifestations', 1), ('frissons', 1), ('escapade', 1), ('designee', 1), ('ghost', 1), ('cachent', 1), ('mourir', 1), ('fanfan', 1), ('teinture', 1), ('soucis', 1), ('ouai', 1), ('epreuvedevoileolympique', 1), ('mascaret', 1), ('gacher', 1), ('decu', 1), ('sac', 1), ('degradee', 1), ('desservira', 1), ('olympiq', 1), ('genial', 1), ('embarquement', 1), ('tjs', 1), ('plantes', 1), ('gazon', 1), ('cherche', 1), ('additionnelles', 1), ('trolling', 1), ('apprends', 1), ('echapper', 1), ('debutent', 1), ('labonneexcuse', 1), ('refuser', 1), ('boule', 1), ('lyonnaise', 1), ('bouchons', 1), ('vanne', 1), ('pourrie', 1), ('elements', 1), ('but', 1), ('croise', 1), ('deroulent', 1), ('pmrstreet', 1), ('fantasmer', 1), ('cuit', 1), ('mangeurs', 1), ('fours', 1), ('bataclan', 1), ('saintetiennedurouvray', 1), ('compiegne', 1), ('clairoix', 1), ('parfum', 1), ('prevision', 1), ('prenant', 1), ('soutenabilite', 1), ('planete', 1), ('erc', 1), ('minutieusement', 1), ('roma2024', 1), ('lolympiade', 1), ('prof', 1), ('equitation', 1), ('detrousse', 1), ('sure', 1), ('choisissent', 1), ('clarte', 1), ('fantasme', 1), ('quentin', 1), ('siliconvalley', 1), ('mecs', 1), ('lobbyiste', 1), ('aralympique', 1), ('jop2016', 1), ('ryadhsallem', 1), ('handape', 1), ('brazil', 1), ('debout', 1), ('cauchemard', 1), ('lookea', 1), ('esp', 1), ('solution', 1), ('joathome', 1), ('certainement', 1), ('supports', 1), ('casaitalia', 1), ('desordre', 1), ('menvole', 1), ('16h', 1), ('comingsoon', 1), ('atteint', 1), ('enrichit', 1), ('choisirait', 1), ('hesitez', 1), ('visitera', 1), ('servira', 1), ('fossoyeur', 1), ('cahuzac', 1), ('cazeneuve', 1), ('evident', 1), ('pertinent', 1), ('flambeau', 1), ('floorball', 1), ('are', 1), ('ready', 1), ('retransmissions', 1), ('feraient', 1), ('internet', 1), ('charia', 1), ('vigueur', 1), ('islam', 1), ('barbarie', 1), ('nudge', 1), ('appauvrit', 1), ('nuisible', 1), ('reviser', 1), ('plans', 1), ('scandales', 1), ('assimiles', 1), ('gris', 1), ('apprenti', 1), ('marin', 1), ('dompte', 1), ('octobre', 1), ('protegelaplanete', 1), ('degoutee', 1), ('groupement', 1), ('concentre', 1), ('svplanete', 1), ('rapprocher', 1), ('unanime', 1), ('posait', 1), ('tee', 1), ('shirt', 1), ('niceattentat', 1), ('demission', 1), ('tbt', 1), ('07au', 1), ('fetenationale', 1), ('tousprets', 1), ('apero', 1), ('hypocrisie', 1), ('disqualification', 1), ('ladoumegue', 1), ('1932', 1), ('production', 1), ('voilier', 1), ('amerigovespucci', 1), 
('acheve', 1), ('promotionnel', 1), ('ratez', 1), ('articles', 1), ('jeveux2015', 1), ('hospitalite', 1), ('nettoie', 1), ('prime', 1), ('saluee', 1), ('situvasario', 1), ('demontre', 1), ('cdos', 1), ('41', 1), ('maintenu', 1), ('construisez', 1), ('expertise', 1), ('socialbusiness', 1), ('reduction', 1), ('inegalites', 1), ('assos', 1), ('achevee', 1), ('ptn', 1), ('vend', 1), ('sauter', 1), ('sf', 1), ('symbolique', 1), ('vehiculer', 1), ('messages', 1), ('choquee', 1), ('refus', 1), ('hommage', 1), ('homophobe', 1), ('orlando', 1), ('intrinseque', 1), ('marnais', 1), ('infractructures', 1), ('mesure', 1), ('defiscaliser', 1), ('organisatrices', 1), ('rentables', 1), ('save', 1), ('jnss2016', 1), ('portugal', 1), ('empochent', 1), ('pactole', 1), ('perturbent', 1), ('ppp', 1), ('dure', 1), ('digerer', 1), ('tromper', 1), ('w', 1), ('celebrations', 1), ('tdf2016', 1), ('letapedutour', 1), ('foodtech', 1), ('realitevirtuelle', 1), ('anon', 1), ('voudraient', 1), ('retweete', 1), ('jennifer', 1), ('lopez', 1), ('mentale', 1), ('forces', 1), ('reflexe', 1), ('graet', 1), ('bourdindirect', 1), ('update', 1), ('trompe', 1), ('deroulement', 1), ('credibiliser', 1), ('territoiresdinfos', 1), ('appui', 1), ('rapporte', 1), ('economiquement', 1), ('mediter', 1), ('allezlesbleu', 1), ('staps', 1), ('events', 1), ('fanzone', 1), ('fanzonetoulouse', 1), ('57', 1), ('mev2016', 1), ('unimev', 1), ('ameliore', 1), ('investisseurs', 1), ('8h10', 1), ('berlin', 1), ('1936', 1), ('fanzonetoureiffel', 1), ('certification', 1), ('iso20121', 1), ('rejeter', 1), ('boniface', 1), ('file', 1), ('collaborative', 1), ('rip', 1), ('rocard', 1), ('accumule', 1), ('cliches', 1), ('partis', 1), ('pts', 1), ('participons', 1), ('party', 1), ('concourir', 1), ('ralentie', 1), ('obligatoire', 1), ('casier', 1), ('vierge', 1), ('politicien', 1), ('cm', 1), ('fdj', 1), ('reunies', 1), ('mignon', 1), ('kua', 1), ('envoie', 1), ('lobbyistes', 1), ('ilms', 1), ('strategies', 1), ('tir', 1), ('remplaces', 1), ('arbalettes', 1), ('reunie', 1), ('dream', 1), ('surfer', 1), ('vague', 1), ('demarche', 1), ('laccessibilite', 1), ('figure', 1), ('daccompagnement', 1), ('inaugure', 1), ('participative', 1), ('mouais', 1), ('interrogeant', 1), ('coop', 1), ('collterrs', 1), ('associees', 1), ('recul', 1), ('suffisant', 1), ('cru', 1), ('natation', 1), ('baignoire', 1), ('officiels', 1), ('vivatech', 1), ('lausanne', 1), ('bernardlapasset', 1), ('lobbies', 1), ('polpor', 1), ('350', 1), ('hectares', 1), ('dedies', 1), ('vairessurmarne', 1), ('soupirs', 1), ('ck', 1), ('vaire', 1), ('textileinnovations', 1), ('innovante', 1), ('demarre', 1), ('ambassadrice', 1), ('spectateur', 1), ('selectionnables', 1), ('teamlifa', 1), ('elegant', 1), ('pin', 1), ('runners', 1), ('geant', 1), ('pallez', 1), ('peinturebiodegradable', 1), ('uni', 1), ('representants', 1), ('spectatuer', 1), ('avgeekfr', 1), ('volera', 1), ('nautisme', 1), ('cubem', 1), ('pendent', 1), ('active', 1), ('moderniser', 1), ('cmdijon', 1), ('dijonla', 1), ('laborde', 1), ('signent', 1), ('cooperation', 1), ('syndicalistes', 1), ('connaissent', 1), ('revendications', 1), ('doss', 1), ('productivite', 1), ('pmp', 1), ('1eres', 1), ('independant', 1), ('4e', 1), ('contorsionnent', 1), ('reflechissez', 1), ('bnp', 1), ('paribas', 1), ('brennus', 1), ('swing', 1), ('teampandanouslesjeux', 1), ('visitent', 1), ('louvre', 1), ('usep75', 1), ('ruche', 1), ('courses', 1), ('scooters', 1), ('homologuees', 1), ('atheltes', 1), ('representer', 1), ('celebree', 1), ('le93', 1), 
('concrets', 1), ('axe', 1), ('portons', 1), ('laxe', 1), ('moteur', 1), ('unissent', 1), ('break', 1), ('cdos21', 1), ('amf21', 1), ('sourires', 1), ('trains', 1), ('fiction', 1), ('interactive', 1), ('familledu', 1), ('familleolympique', 1), ('32', 1), ('92', 1), ('colombes2024', 1), ('lecole', 1), ('dizieux', 1), ('associer', 1), ('arlesien', 1), ('tte', 1), ('motive', 1), ('chambre', 1), ('escient', 1), ('contestataire', 1), ('manifester', 1), ('profondes', 1), ('symboliques', 1), ('ssd', 1), ('cand', 1), ('gpe', 1), ('axes', 1), ('cd93', 1), ('albert', 1), ('linclusion', 1), ('laccent', 1), ('legalite', 1), ('sappuyant', 1), ('collectivite', 1), ('rayonne', 1), ('fourmille', 1), ('dactions', 1), ('datouts', 1), ('outils', 1), ('appropriee', 1), ('reunions', 1), ('thematiques', 1), ('souhait', 1), ('associes', 1), ('accelerent', 1), ('agure', 1), ('rtctur', 1), ('brillante', 1), ('operation', 1), ('gagnent', 1), ('camp', 1), ('correctement', 1), ('tqo', 1), ('roadto', 1), ('francilien', 1), ('virginia', 1), ('melee', 1), ('recents', 1), ('recale', 1), ('considere', 1), ('defaillant', 1), ('martinez', 1), ('detour', 1), ('francs', 1), ('revelera', 1), ('arpajon', 1), ('privilegie', 1), ('dessous', 1), ('desirer', 1), ('fumigenes', 1), ('complementaires', 1), ('quelqu', 1), ('capoter', 1), ('poule', 1), ('exactement', 1), ('clash', 1), ('temple', 1), ('overcool', 1), ('pronojdd', 1), ('hat', 1), ('trick', 1), ('sqy', 1), ('lorsque', 1), ('secr', 1), ('semelles', 1), ('connectees', 1), ('engagez', 1), ('proposant', 1), ('manif', 1), ('comptons', 1), ('cables', 1), ('diplo', 1), ('qqls', 1), ('investisse', 1), ('refection', 1), ('entrepreneur', 1), ('rapporteraient', 1), ('compost', 1), ('telco', 1), ('obstination', 1), ('epine', 1), ('proprete', 1), ('circulaire', 1), ('autissier', 1), ('attendent', 1), ('450', 1), ('raisonnable', 1), ('distance', 1), ('ttes', 1), ('autoroutes', 1), ('avc', 1), ('olympic', 1), ('entend', 1), ('deplorable', 1), ('457', 1), ('residence', 1), ('technos', 1), ('fens2016', 1), ('marge', 1), ('finissait', 1), ('accidents', 1), ('chargee', 1), ('sentir', 1), ('lejsd', 1), ('gain', 1), ('casser', 1), ('couilles', 1), ('interdictions', 1), ('ultras', 1), ('diverses', 1), ('uefaeuro2016', 1), ('civile', 1), ('compromis', 1), ('douce', 1), ('humeur', 1), ('entierement', 1), ('faciliter', 1), ('ressemblait', 1), ('fautpasrever', 1), ('cyclismactu', 1), ('tennisactu', 1), ('eloignent', 1), ('debordements', 1), ('scenes', 1), ('guerillas', 1), ('urbaines', 1), ('voyants', 1), ('euros2016', 1), ('enrages', 1), ('bonsoir', 1), ('annoncez', 1), ('ligue', 1), ('comprendra', 1), ('jdcjdr', 1), ('pdf', 1), ('formulaire', 1), ('remplir', 1), ('hypothetique', 1), ('angrus', 1), ('lvq', 1), ('chaises', 1), ('claquee', 1), ('guerilla', 1), ('joyeuse', 1), ('qualifient', 1), ('bfmfoot', 1), ('overdosedeviolence', 1), ('r92st', 1), ('lacets', 1), ('chiant', 1), ('lacqutho', 1), ('gaitelyrique', 1), ('serres', 1), ('rhabiller', 1), ('planche', 1), ('augmentation', 1), ('presentiel', 1), ('allies', 1), ('vr', 1), ('xpot', 1), ('contraignant', 1), ('partagez', 1), ('frarou', 1), ('choisissez', 1), ('bordayl', 1), ('sofrench', 1), ('out', 1), ('utilisez', 1), ('cityzenmap', 1), ('dialoguer', 1), ('degats', 1), ('ruinent', 1), ('morano', 1), ('primaire2016', 1), ('supporter', 1), ('ffn', 1), ('voisins', 1), ('europeens', 1), ('colline', 1), ('vtt', 1), ('intuition', 1), ('grille', 1), ('etrangere', 1), ('tensions', 1), ('convier', 1), ('incapacite', 1), ('calamiteux', 1), ('puanteur', 
1), ('showeuro2016', 1), ('lis', 1), ('commentaires', 1), ('legrandshow', 1), ('antiprog', 1), ('a1', 1), ('1resultat', 1), ('demandent', 1), ('nathalielevync', 1), ('ddouillet', 1), ('eh', 1), ('concerts', 1), ('louane', 1), ('20h2017', 1), ('roland', 1), ('garros', 1), ('mouvements', 1), ('gratter', 1), ('udi', 1), ('piteuse', 1), ('phiphou', 1), ('echoient', 1), ('foutu', 1), ('rappellent', 1), ('prolos', 1), ('tourner', 1), ('strike', 1), ('grevesncf', 1), ('grevedeseboueurs', 1), ('blocagerungis', 1), ('plombee', 1), ('renover', 1), ('rendent', 1), ('unebellebandedegoistes', 1), ('envoyer', 1), ('syndicats', 1), ('pasivite', 1), ('plombees', 1), ('terreur', 1), ('eventres', 1), ('toilettes', 1), ('athenes', 1), ('apprend', 1), ('griller', 1), ('mouille', 1), ('finira', 1), ('valent', 1), ('terrains', 1), ('aubervillers', 1), ('mobilisons', 1), ('respectezaubervilliers', 1), ('restons', 1), ('5km', 1), ('2km', 1), ('compacite', 1), ('2005', 1), ('gip', 1), ('valider', 1), ('landy', 1), ('frontdegauche', 1), ('jack', 1), ('ralite', 1), ('couler', 1), ('touche', 1), ('mediterraneen', 1), ('gaudin', 1), ('avais', 1), ('cluster', 1), ('dugny', 1), ('vessies', 1), ('lanternes', 1), ('effectivement', 1), ('mediatique', 1), ('olympiades', 1), ('lecomte', 1), ('acter', 1), ('emplacement', 1), ('precise', 1), ('m5s', 1), ('tribunes', 1), ('outre', 1), ('copresidents', 1), ('oncroiselesdoigts', 1), ('journeemondialedelenvironnement', 1), ('jme2016', 1), ('eugreenweek', 1), ('dorientation', 1), ('dptdunord', 1), ('senateurs', 1), ('reaffirment', 1), ('attachement', 1), ('schema', 1), ('envisages', 1), ('zo', 1), ('lune', 1), ('reservee', 1), ('rager', 1), ('riverains', 1), ('pluriel', 1), ('representes', 1), ('serieusement', 1), ('bureaux', 1), ('laube', 1), ('larena', 1), ('alternatif', 1), ('ball', 1), ('destruction', 1), ('sido18', 1), ('magifique', 1), ('sortir', 1), ('rimer', 1), ('provisoires', 1), ('permanentes', 1), ('atterrissage', 1), ('daigne', 1), ('quitter', 1), ('gdn', 1), ('collection', 1), ('ligue1conforama', 1), ('swear', 1), ('juventusrealmadrid', 1), ('campagnes', 1), ('univers', 1), ('songeurs', 1), ('europeennes', 1), ('2026', 1), ('gran', 1), ('refusent', 1), ('pabo', 1), ('marrant', 1), ('antiderapage', 1), ('cross', 1), ('cheque', 1), ('prevoir', 1), ('salles', 1), ('multimodales', 1), ('3000', 1), ('animera', 1), ('granddebat', 1), ('rv', 1), ('sem', 1), ('lampe', 1), ('aladin', 1), ('engagements', 1), ('epaulee', 1), ('terminale', 1), ('gustave', 1), ('concernes', 1), ('tabler', 1), ('pertes', 1), ('biarritz', 1), ('recule', 1), ('impasses', 1), ('organisait', 1), ('colonie', 1), ('mega', 1), ('zut', 1), ('thauvin', 1), ('cola', 1), ('mc', 1), ('git', 1), ('jardin', 1), ('botanique', 1), ('bruits', 1), ('determinants', 1), ('tension', 1), ('bruit', 1), ('mkn', 1), ('pinocchio', 1), ('fournisseur', 1), ('perches', 1), ('augmente', 1), ('sorties', 1), ('realisees', 1), ('evaluer', 1), ('optimiser', 1), ('convoquer', 1), ('frequence', 1), ('attentas', 1), ('intensifie', 1), ('trottinette', 1), ('quinoa', 1), ('respectivement', 1), ('aia', 1), ('deau', 1), ('crypto', 1), ('olympico', 1), ('deni', 1), ('asmonaco', 1), ('mbappe', 1), ('falcao', 1), ('piscinesolympiques', 1), ('insolites', 1), ('loooooose', 1), ('davantage', 1), ('previsionnel', 1), ('sdg', 1), ('lesrepublicains', 1), ('splatoonec', 1), ('lon', 1), ('mandates', 1), ('existant', 1), ('inserer', 1), ('retrogrades', 1), ('realistes', 1), ('revenir', 1), ('proches', 1), ('gonfler', 1), ('session', 1), ('toxic', 1), 
('ve', 1), ('ajustements', 1), ('apprecier', 1), ('reussites', 1), ('gesti', 1), ('matinal', 1), ('delyx', 1), ('el', 1), ('prevoient', 1), ('bcp', 1), ('commentaire', 1), ('puent', 1), ('milliar', 1), ('tripler', 1), ('startupnation', 1), ('rassures', 1), ('cice', 1), ('utilisons', 1), ('veritable', 1), ('ignominie', 1), ('profondement', 1), ('immoraux', 1), ('irresponsables', 1), ('inspection', 1), ('alertent', 1), ('terminer', 1), ('linspection', 1), ('impossible', 1), ('puisqu', 1), ('visuel', 1), ('anonymement', 1), ('mail', 1), ('lisez', 1), ('pointer', 1), ('preoccupant', 1), ('avi', 1), ('cgedd', 1), ('ligjs', 1), ('marquise', 1), ('dexcedent', 1), ('alsace', 1), ('stadeolympique', 1), ('passant', 1), ('cinquieme', 1), ('dranceen', 1), ('ment', 1), ('drancy', 1), ('dranceens', 1), ('remportes', 1), ('penalisent', 1), ('depasserait', 1), ('peupl', 1), ('craignait', 1), ('8mds', 1), ('bidon', 1), ('dorer', 1), ('blason', 1), ('andalouse', 1), ('psgasm', 1), ('avocat', 1), ('semaineindustrie', 1), ('hdpros', 1), ('endettes', 1), ('longues', 1), ('manifes', 1), ('concernees', 1), ('vit', 1), ('cingles', 1), ('deposer', 1), ('operations', 1), ('ironiquement', 1), ('ame', 1), ('mdrrr', 1), ('etonnante', 1), ('normale', 1), ('cheres', 1), ('communs', 1), ('cou', 1), ('rajouter', 1), ('linstant', 1), ('18h50', 1), ('linterview', 1), ('parlerons', 1), ('medef', 1), ('estimation', 1), ('diminution', 1), ('prog', 1), ('debattue', 1), ('mun', 1), ('pilier', 1), ('province', 1), ('pitre', 1), ('craintes', 1), ('censees', 1), ('nes', 1), ('pist88', 1), ('blr', 1), ('hc', 1), ('suffit', 1), ('amputer', 1), ('budge', 1), ('ho', 1), ('puit', 1), ('evalues', 1), ('partait', 1), ('chiffrage', 1), ('exprimait', 1), ('desapprobation', 1), ('repondait', 1), ('profi', 1), ('assumer', 1), ('phenomenale', 1), ('quoiqu', 1), ('deficits', 1), ('sacrifierons', 1), ('fenetres', 1), ('lancent', 1), ('grince', 1), ('communiquer', 1), ('elemen', 1), ('mlp', 1), ('olympiquede', 1), ('17000', 1), ('bubble', 1), ('box', 1), ('remportait', 1), ('lappel', 1), ('talents', 1), ('paralymp', 1), ('collectives', 1), ('semblent', 1), ('malades', 1), ('mitrailles', 1), ('sos', 1), ('agir', 1), ('collectivement', 1), ('attriste', 1), ('compromet', 1), ('inscription', 1), ('tonique', 1), ('reboul', 1), ('rom', 1), ('elyx', 1), ('sappuyer', 1), ('dinclusion', 1), ('convaincu', 1), ('devienne', 1), ('desirable', 1), ('collective', 1), ('transilien', 1), ('cul', 1), ('autr', 1), ('projecteur', 1), ('enseigne', 1), ('form', 1), ('femmepouvoitetdentelles', 1), ('proposes', 1), ('economiqu', 1), ('incubateur', 1), ('avecpoleemploi', 1), ('maillot', 1), ('heros', 1), ('jeremy', 1), ('participait', 1), ('colloque', 1), ('liv', 1), ('labs', 1), ('autou', 1), ('serie', 1), ('infojeunesse', 1), ('missionnes', 1), ('ris', 1), ('reflexions', 1), ('uniformation', 1), ('nationalites', 1), ('linguistiques', 1), ('qualifiee', 1), ('tetes', 1), ('metropoles', 1), ('emploiformation', 1), ('artistique', 1), ('unir', 1), ('rencontrent', 1), ('rassemble', 1), ('deploiement', 1), ('360', 1), ('generant', 1), ('1m', 1), ('dallard', 1), ('sortons', 1), ('confiant', 1), ('sereins', 1), ('mecanisme', 1), ('definit', 1), ('produits', 1), ('projette', 1), ('skateboard', 1), ('ffrollerskatebord', 1), ('ffrs', 1), ('procedure', 1), ('dexpropriation', 1), ('dutilite', 1), ('lacher', 1), ('oeufs', 1), ('proposecommewauquiez', 1), ('fiscal', 1), ('systemique', 1), ('causer', 1), ('rediff', 1), ('marketi', 1), ('enedis', 1), ('veritables', 1), ('croisee', 1), 
('sengagent', 1), ('marquer', 1), ('patronage', 1), ('temoign', 1), ('verification', 1), ('contribution', 1), ('lolympisme', 1), ('vehiculesautonomes', 1), ('jotokyo2020', 1), ('cergy', 1), ('anerie', 1), ('approche', 1), ('maitrisee', 1), ('cadrer', 1), ('partenar', 1), ('arrivant', 1), ('constater', 1), ('legislatif', 1), ('extrait', 1), ('opere', 1), ('lelaboration', 1), ('analyser', 1), ('mobilisee', 1), ('commerce', 1), ('digitalisation', 1), ('paraitre', 1), ('exceptions', 1), ('propri', 1), ('creil', 1), ('ivry', 1), ('questionnent', 1), ('gentrification', 1), ('incontrolee', 1), ('sarranger', 1), ('achete', 1), ('barriereshonte', 1), ('lepetit', 1), ('wednesday', 1), ('28th', 1), ('ioc', 1), ('led', 1), ('dubi', 1), ('will', 1), ('for', 1), ('1st', 1), ('project', 1), ('rev', 1), ('offres', 1), ('olympiens', 1), ('structure', 1), ('processus', 1), ('poursuite', 1), ('rsu', 1), ('rencontrer', 1), ('consortium', 1), ('preventive', 1), ('contourner', 1), ('sophie', 1), ('lorant', 1), ('fabrice', 1), ('lacroix', 1), ('mazargues', 1), ('deloge', 1), ('valait', 1), ('benabb', 1), ('athetisme', 1), ('moirans', 1), ('isere', 1), ('millian', 1), ('boucher', 1), ('brillamment', 1), ('decroches', 1), ('ere', 1), ('braun', 1), ('decroche', 1), ('qualification', 1), ('ers', 1), ('chpts', 1), ('natati', 1), ('newvideo', 1), ('yo', 1), ('taxis', 1), ('populai', 1), ('cros', 1), ('insep', 1), ('impliques', 1), ('associat', 1), ('immateriel', 1), ('pratiq', 1), ('parrain', 1), ('refrain', 1), ('conservant', 1), ('derouleront', 1), ('tenaille', 1), ('fourmis', 1), ('beaumont', 1), ('oise', 1), ('paysdelaloire', 1), ('ministerielles', 1), ('lenv', 1), ('christine', 1), ('evelyne', 1), ('90k', 1), ('mulhouse', 1), ('activite', 1), ('fixe', 1), ('rapple', 1), ('pianos', 1), ('balade', 1), ('entrepreneurial', 1), ('coue', 1), ('matth', 1), ('twintowers', 1), ('positives', 1), ('associatif', 1), ('56', 1), ('tartes', 1), ('stains', 1), ('germain', 1), ('archi', 1), ('sofia', 1), ('mat', 1), ('assurera', 1), ('loire', 1), ('desden', 1), ('dequipements', 1), ('demontrer', 1), ('consiste', 1), ('identifier', 1), ('medail', 1), ('franchise', 1), ('chiffres', 1), ('explosifs', 1), ('pste', 1), ('mouvem', 1), ('intervenir', 1), ('occupe', 1), ('durabilite', 1), ('hot', 1), ('reellement', 1), ('redevient', 1), ('investi', 1), ('panel', 1), ('retraite', 1), ('societeapprenante', 1), ('deplorent', 1), ('hommes', 1), ('irreproch', 1), ('intuitif', 1), ('maif', 1), ('alles', 1), ('concret', 1), ('taekwendo', 1), ('investis', 1), ('lucie', 1), ('lenne', 1), ('etudiante', 1), ('specialise', 1), ('chapoy', 1), ('consultant', 1), ('julian', 1), ('jappert', 1), ('think', 1), ('tank', 1), ('citoyennete', 1), ('serons', 1), ('gerke', 1), ('plait', 1), ('inquiets', 1), ('fasse', 1), ('bourdes', 1), ('ecoutez', 1), ('sciences', 1), ('victor', 1), ('bacques', 1), ('etudiant', 1), ('onekites', 1), ('respectueux', 1), ('lambassade', 1), ('bretagne', 1), ('lexperien', 1), ('londres2012', 1), ('soustons', 1), ('uk', 1), ('icss', 1), ('33', 1), ('hamouri', 1), ('empiff', 1), ('parmentier', 1), ('philou', 1), ('flou', 1), ('lambert75', 1), ('cci', 1), ('collaborations', 1), ('complement', 1), ('noues', 1), ('creations', 1), ('demplois', 1), ('cidj', 1), ('bearn', 1), ('soule', 1), ('prefaceur', 1), ('qualifica', 1), ('croyable', 1), ('aleatoires', 1), ('18h', 1), ('inra', 1), ('eps', 1), ('jaures', 1), ('tenable', 1), ('sav', 1), ('frenchturism', 1), ('visit', 1), ('vienne', 1), ('agreable', 1), ('39e', 1), ('40e', 1), 
('maitresse', 1), ('cm1', 1), ('cm2', 1), ('sinteresse', 1), ('hemiplegique', 1), ('compta', 1), ('colonne', 1), ('agit', 1), ('politiq', 1), ('reduire', 1), ('ferais', 1), ('lsc', 1), ('louvrage', 1), ('concurrent', 1), ('rabattu', 1), ('ores', 1), ('crai', 1), ('ages', 1), ('missions', 1), ('context', 1), ('entendrez', 1), ('formidabl', 1), ('refait', 1), ('annoncee', 1), ('institution', 1), ('fermera', 1), ('pmi', 1), ('franciliennes', 1), ('creatrices', 1), ('barsacq', 1), ('regionaux', 1), ('pyeonchang2018', 1), ('cbre', 1), ('attractives', 1), ('210', 1), ('philosophie', 1), ('exaequo', 1), ('prevention', 1), ('detablis', 1), ('plongeur', 1), ('blesoise', 1), ('emilie', 1), ('menuet', 1), ('ecoles', 1), ('gabriel', 1), ('incarne', 1), ('relayeur', 1), ('inaugurait', 1), ('pilote', 1), ('lemonde', 1), ('communistes', 1), ('martine', 1), ('web', 1), ('bouclera', 1), ('lorsqu', 1), ('modulaire', 1), ('provisoire', 1), ('15000', 1), ('paques', 1), ('catholic', 1), ('priere', 1), ('catholique', 1), ('chretiens', 1), ('papefrancois', 1), ('pape', 1), ('eglise', 1), ('recycle', 1), ('e1we', 1), ('thevoice', 1), ('detr', 1), ('shakeup', 1), ('piegeac', 1), ('simplem', 1), ('baiser', 1), ('sinquiete', 1), ('planetaire', 1), ('type', 1), ('13eme', 1), ('arrondissement', 1), ('facilement', 1), ('commercialisable', 1), ('dathletisme', 1), ('fourberie', 1), ('scapin', 1), ('foyers', 1), ('decents', 1), ('verrez', 1), ('democraties', 1), ('occidentales', 1), ('renonce', 1), ('degradation', 1), ('multi', 1), ('facette', 1), ('sauront', 1), ('respectu', 1), ('douche', 1), ('mignonne', 1), ('perf', 1), ('continuerai', 1), ('grecs', 1), ('pensaient', 1), ('persuadent', 1), ('engraissent', 1), ('depen', 1), ('3x3', 1), ('etouffe', 1), ('eduquer', 1), ('invente', 1), ('fair', 1), ('revuperer', 1), ('pots', 1), ('vins', 1), ('copieux', 1), ('versez', 1), ('march', 1), ('pam', 1), ('29', 1), ('nationales', 1), ('detudes', 1), ('mettront', 1), ('socialistes', 1), ('gouvernants', 1), ('franco', 1), ('ouver', 1), ('constates', 1), ('villages', 1), ('globalement', 1), ('roron', 1), ('architectural', 1), ('06', 1), ('attributions', 1), ('immobiliers', 1), ('hauteurs', 1), ('avalises', 1), ('verront', 1), ('surpris', 1), ('lit', 1), ('coupes', 1), ('lhospitalier', 1), ('smartmobility', 1), ('problematique', 1), ('dispendieux', 1), ('zero', 1), ('taxe', 1), ('distraction', 1), ('specifique', 1), ('securisation', 1), ('economiecirculaire', 1), ('improvisent', 1), ('defilees', 1), ('prefe', 1), ('quetes', 1), ('tr', 1), ('combler', 1), ('refuse', 1), ('negatif', 1), ('commandes', 1), ('ficelees', 1), ('renverront', 1), ('balle', 1), ('plier', 1), ('amont', 1), ('reproche', 1), ('aujour', 1), ('greve22mars', 1), ('lesmemesquitrinquent', 1), ('citation', 1), ('tiendrons', 1), ('linevitable', 1), ('consequence', 1), ('labsence', 1), ('bienetre', 1), ('incluant', 1), ('mecanismes', 1), ('avancees', 1), ('senat360', 1), ('mene', 1), ('clairs', 1), ('bizarre', 1), ('menti', 1), ('det', 1), ('probe', 1), ('attrib', 1), ('merdier', 1), ('sra', 1), ('changera', 1), ('chaos', 1), ('mouvemen', 1), ('canellas', 1), ('cost', 1), ('resterons', 1), ('ferons', 1), ('nexclut', 1), ('jo2014', 1), ('lourdement', 1), ('repos', 1), ('definitive', 1), ('institut', 1), ('hypoxiques', 1), ('meanwhilein', 1), ('douvrir', 1), ('destine', 1), ('ppl', 1), ('comb', 1), ('envisage', 1), ('seancepublique', 1), ('mixte', 1), ('paritaire', 1), ('mipim18', 1), ('damenagement', 1), ('montaigne', 1), ('ramassent', 1), ('teamforce', 1), 
('etdemocratie', 1), ('sauvegarder', 1), ('etai', 1), ('jt20h', 1), ('letalk', 1), ('secretaire', 1), ('capteur', 1), ('dinfographie', 1), ('dapprendre', 1), ('guepe', 1), ('troque', 1), ('fleuret', 1), ('questionnee', 1), ('respe', 1), ('dinquietude', 1), ('inspections', 1), ('generales', 1), ('appellent', 1), ('accepter', 1), ('revolution', 1), ('envisagee', 1), ('quun', 1), ('oxford', 1), ('moyenne', 1), ('196', 1), ('importante', 1), ('honnetement', 1), ('catastrophes', 1), ('nues', 1), ('beachvolley', 1), ('argenteuil', 1), ('beach', 1), ('sacrifiees', 1), ('socialos', 1), ('heriton', 1), ('equitables', 1), ('na', 1), ('considerables', 1), ('flamber', 1), ('alertes', 1), ('officielles', 1), ('couru', 1), ('quiose', 1), ('etudiantes', 1), ('frederic', 1), ('mion', 1), ('tenus', 1), ('hautsdeseine', 1), ('considerablement', 1), ('justifications', 1), ('imprevisibles', 1), ('xviiieme', 1), ('preferez', 1), ('chemin', 1), ('intervenue', 1), ('previsions', 1), ('irrealistes', 1), ('texte', 1), ('pjlolympique', 1), ('elaboree', 1), ('cmp', 1), ('gou', 1), ('surendettee', 1), ('mascarade', 1), ('relire', 1), ('vigilants', 1), ('provoquer', 1), ('serieuse', 1), ('rassurez', 1), ('csg', 1), ('essorer', 1), ('conditionnel', 1), ('attendons', 1), ('meil', 1), ('sciemment', 1), ('grandement', 1), ('elyas', 1), ('bouadjadja', 1), ('ard', 1), ('montargis', 1), ('matchs', 1), ('consacree', 1), ('magouill', 1), ('jametal', 1), ('arg', 1), ('tahu', 1), ('lufuanitu', 1), ('danemark', 1), ('ave', 1), ('identifies', 1), ('epluche', 1), ('filtre', 1), ('derives', 1), ('alarmant', 1), ('titree', 1), ('noooooon', 1), ('onvarigoler', 1), ('jaune', 1), ('tenu', 1), ('jolatres', 1), ('bobards', 1), ('mau', 1), ('rassurant', 1), ('he', 1), ('attendez', 1), ('inspecti', 1), ('import', 1), ('suppose', 1), ('inquietudes', 1), ('mannes', 1), ('publici', 1), ('devriez', 1), ('angelamerkel', 1), ('allemagne', 1), ('assigne', 1), ('google', 1), ('apple', 1), ('annuel', 1), ('chateau', 1), ('chantilly', 1), ('estimes', 1), ('pilule', 1), ('psychologiquement', 1), ('norme', 1), ('indispensable', 1), ('canaux', 1), ('sotchi', 1), ('revanche', 1), ('inspecteurs', 1), ('jugeraient', 1), ('anormalement', 1), ('couteux', 1), ('explosion', 1), ('faveurs', 1), ('leuphorie', 1), ('provoque', 1), ('bouche', 1), ('his', 1), ('abandonner', 1), ('nddl', 1), ('sacrifier', 1), ('rites', 1), ('lindustrie', 1), ('creees', 1), ('entamee', 1), ('trav', 1), ('annecy', 1), ('couta', 1), ('culbutes', 1), ('consacre', 1), ('intermediaire', 1), ('craint', 1), ('fillage', 1), ('etr', 1), ('cashinvestigation', 1), ('stop', 1), ('commencant', 1), ('18milliards', 1), ('2milliards', 1), ('mineurs', 1), ('isoles', 1), ('130m', 1), ('iref', 1), ('kuros', 1), ('improbable', 1), ('figuerer', 1), ('ruse', 1), ('enfermer', 1), ('parfait', 1), ('populasse', 1), ('saigner', 1), ('authentique', 1), ('lhypothetique', 1), ('artificiel', 1), ('fe', 1), ('negociation', 1), ('avaient', 1), ('aler', 1), ('valident', 1), ('immoweek', 1), ('prenons', 1), ('signees', 1), ('truchot', 1), ('miss', 1), ('castagnette', 1), ('47', 1), ('mensuelle', 1), ('vigilanc', 1), ('signal', 1), ('reell', 1), ('intercommunal', 1), ('plui', 1), ('faisait', 1), ('couvert', 1), ('personnalites', 1), ('classement', 1), ('structurelle', 1), ('seconder', 1), ('castex', 1), ('cacher', 1), ('senateur', 1), ('vendus', 1), ('emprunte', 1), ('couloir', 1), ('iskenderov', 1), ('damateurs', 1), ('velibgate', 1), ('luc', 1), ('allaire', 1), ('superstitieux', 1), ('reconstitution', 1), ('jungle', 
1), ('calais', 1), ('mentalites', 1), ('louve', 1), ('vigila', 1), ('pessimiste', 1), ('globalite', 1), ('beneficieront', 1), ('yannick', 1), ('evolution', 1), ('jparalympiques', 1), ('inspirants', 1), ('degage', 1), ('cfcross', 1), ('ffa', 1), ('plouay', 1), ('isfwsc2018', 1), ('crosscountry', 1), ('school', 1), ('bergesdelaseine', 1), ('concevoir', 1), ('syst', 1), ('kite', 1), ('e1matin', 1), ('suggestion', 1), ('notes', 1), ('quan', 1), ('deguisee', 1), ('souris', 1), ('x', 1), ('cartographie', 1), ('39', 1), ('iennes', 1), ('necessite', 1), ('gironde', 1), ('canyon', 1), ('conseiller', 1), ('troussel', 1), ('presi', 1), ('section', 1), ('lexigence', 1), ('encadrer', 1), ('prestataires', 1), ('amelioration', 1), ('executif', 1), ('fc', 1), ('a35mn', 1), ('6semaines', 1), ('telecom', 1), ('meritent', 1), ('tickets', 1), ('cryptomonnaie', 1), ('nba', 1), ('franchissement', 1), ('rent', 1), ('repere', 1), ('cabinets', 1), ('venant', 1), ('passionnante', 1), ('levolution', 1), ('softpower', 1), ('intern', 1), ('meneur', 1), ('titulaire', 1), ('bigballerbrand', 1), ('obtient', 1), ('sou', 1), ('gosse', 1), ('joff', 1), ('genovesi', 1), ('presentera', 1), ('exploitation', 1), ('ovg', 1), ('arenas', 1), ('admirable', 1), ('rat', 1), ('dates', 1), ('quiseront', 1), ('avances', 1), ('pourront', 1), ('remportant', 1), ('corporate', 1), ('recompensee', 1), ('remporte', 1), ('5e', 1), ('morin', 1), ('sprint', 1), ('choper', 1), ('oscille', 1), ('sensation', 1), ('crever', 1), ('nadot', 1), ('mdp', 1), ('ymca', 1), ('trevise', 1), ('campus', 1), ('lentrainement', 1), ('compense', 1), ('emissions', 1), ('co2', 1), ('mercato', 1), ('rdvdu', 1), ('profiteront', 1), ('interdir', 1), ('ue', 1), ('journeedesdroitsdesfemmes', 1), ('mix', 1), ('stationnement', 1), ('agents', 1), ('verbalise', 1), ('illegalement', 1), ('democratie', 1), ('causes', 1), ('tunnels', 1), ('extensions', 1), ('nappes', 1), ('phreatiques', 1), ('congrats', 1), ('connaissant', 1), ('racontait', 1), ('nage', 1), ('nageuse', 1), ('parcourir', 1), ('620', 1), ('km', 1), ('choisissant', 1), ('l1', 1), ('daniel', 1), ('auverlot', 1), ('vincennes', 1), ('encou', 1), ('jouent', 1), ('comedie', 1), ('larmes', 1), ('crocodiles', 1), ('zhao', 1), ('tingyang', 1), ('professeurs', 1), ('filles', 1), ('dircom', 1), ('sensdesmots', 1), ('wow', 1), ('rues', 1), ('pleines', 1), ('nids', 1), ('poules', 1), ('agathe', 1), ('pond', 1), ('gentiment', 1), ('49287', 1), ('agressions', 1), ('profonds', 1), ('subsistent', 1), ('conclusion', 1), ('pb18', 1), ('n2', 1), ('franconville', 1), ('elles', 1), ('ac', 1), ('mental', 1), ('inspirer', 1), ('coherence', 1), ('locales', 1), ('innovateur', 1), ('repondr', 1), ('figurer', 1), ('culturels', 1), ('emanciper', 1), ('letre', 1), ('ind', 1), ('rhumatologue', 1), ('lavaur', 1), ('sursis', 1), ('affichetoncoeur', 1), ('tennisdetable', 1), ('pingpongfrancais', 1), ('metamorphose', 1), ('hslvids', 1), ('hdr', 1), ('bestteam', 1), ('quavec', 1), ('rivesdeseine', 1), ('install', 1), ('desormais', 1), ('chasseurs', 1), ('scalp', 1), ('coupee', 1), ('punition', 1), ('vol', 1), ('participatif', 1), ('decloisonne', 1), ('interdisciplinaire', 1), ('clameurs', 1), ('eteintes', 1), ('brasilia', 1), ('abandonne', 1), ('48768', 1), ('thailande', 1), ('viole', 1), ('audois', 1), ('48521', 1), ('thierno', 1), ('oury', 1), ('barry', 1), ('ombre', 1), ('hebdo', 1), ('mousquetaires', 1), ('davis', 1), ('strade', 1), ('bianche', 1), ('poaefe2018', 1), ('lebanon', 1), ('viensvoirmontaf', 1), ('meetup', 1), ('pat', 1), 
('terredejeux', 1), ('episode', 1), ('amelie', 1), ('fur', 1), ('huit', 1), ('peau', 1), ('48137', 1), ('centaines', 1), ('turques', 1), ('manifestent', 1), ('trocs', 1), ('geree', 1), ('courneuve', 1), ('excite', 1), ('juteuse', 1), ('effets', 1), ('association', 1), ('partenariale', 1), ('gra', 1), ('climatiques', 1), ('laide', 1), ('cecifoot', 1), ('pongistes', 1), ('dionysiennes', 1), ('chomis', 1), ('arri', 1), ('pluie', 1), ('endroit', 1), ('recup', 1), ('robert', 1), ('iger', 1), ('walt', 1), ('company', 1), ('detail', 1), ('47442', 1), ('firminy', 1), ('forgesleseaux', 1), ('anthony', 1), ('grognet', 1), ('deduis', 1), ('salai', 1), ('hierarchie', 1), ('menage', 1), ('homme', 1), ('xfemmes', 1), ('balance', 1), ('lourds', 1), ('46971', 1), ('animateur', 1), ('aere', 1), ('tentative', 1), ('agress', 1), ('facons', 1), ('principe', 1), ('46907', 1), ('montmagny', 1), ('pupponi', 1), ('intronise', 1), ('artisanat', 1), ('professionals', 1), ('135', 1), ('pur', 1), ('cyclos', 1), ('jetais', 1), ('linvite', 1), ('assurancechomage', 1), ('degouter', 1), ('competitif', 1), ('46811', 1), ('53', 1), ('guadeloupeenne', 1), ('triple', 1), ('medaillee', 1), ('46747', 1), ('ivres', 1), ('interpelles', 1), ('tentat', 1), ('flux', 1), ('nulle', 1), ('indiquee', 1), ('indigne', 1), ('coffret', 1), ('plonger', 1), ('jusquen', 1), ('milli', 1), ('cerebres', 1), ('donnait', 1), ('paysdeloire', 1), ('velodrome', 1), ('perm', 1), ('percevra', 1), ('betail', 1), ('orphelinat', 1), ('tanzanien', 1), ('cornel', 1), ('ngaleku', 1), ('children', 1), ('trouverez', 1), ('sejo', 1), ('revelation', 1), ('canard', 1), ('enchaine', 1), ('vaine', 1), ('270000', 1), ('thobois', 1), ('260000', 1), ('lanc', 1), ('demarrer', 1), ('latelier', 1), ('sdes', 1), ('sessions', 1), ('deleves', 1), ('interesses', 1), ('cesars2018', 1), ('injustifie', 1), ('motivation', 1), ('270k', 1), ('termines', 1), ('46181', 1), ('situatio', 1), ('vols', 1), ('fumeurs', 1), ('crack', 1), ('dealers', 1), ('nomis', 1), ('haddad', 1), ('evoquent', 1), ('territo', 1), ('monopoliser', 1), ('outil', 1), ('chirurgien', 1), ('implantation', 1), ('cheveux', 1), ('bouches', 1), ('euhhh', 1), ('cartonne', 1), ('lavance', 1), ('gerber', 1), ('davenir', 1), ('maria', 1), ('vaporise', 1), ('esclaves', 1), ('delocalises', 1), ('exploites', 1), ('prochainement', 1), ('46149', 1), ('armee', 1), ('preliminaire', 1), ('accuse', 1), ('davoi', 1), ('performances', 1), ('brevetblanc', 1), ('dnb', 1), ('reunit', 1), ('fonctionnaire', 1), ('coute', 1), ('fibre', 1), ('salair', 1), ('lannonce', 1), ('lo', 1), ('organ', 1), ('payons', 1), ('dirigeants', 1), ('revelees', 1), ('speculationsimmobilieres', 1), ('46053', 1), ('experiences', 1), ('professionnelles', 1), ('chaleureux', 1), ('46047', 1), ('porcheville', 1), ('surveillante', 1), ('agressee', 1), ('jouera', 1), ('pilo', 1), ('46021', 1), ('charente', 1), ('devons', 1), ('marquent', 1), ('ja', 1), ('denvoi', 1), ('dadministration', 1), ('official', 1), ('launch', 1), ('cesar2018', 1), ('45957', 1), ('kanam', 1), ('serieuses', 1), ('nt', 1), ('ff', 1), ('confirmes', 1), ('enregistrements', 1), ('collectes', 1), ('identification', 1), ('rassemblent', 1), ('confirmera', 1), ('benrabia', 1), ('linvitation', 1), ('hebergement', 1), ('dinterlogement', 1), ('115', 1), ('maxenc', 1), ('45326', 1), ('ambroise', 1), ('examen', 1), ('niveaux', 1), ('ouvrant', 1), ('nouvelleepreuve', 1), ('essaie', 1), ('rayonnent', 1), ('lozere', 1), ('artic', 1), ('pluri', 1), ('annuelle', 1), ('deposee', 1), ('tenais', 1), ('nigg', 
1), ('rame', 1), ('renovee', 1), ('hypothetiques', 1), ('presidentdelarepublique', 1), ('carinne', 1), ('purpose', 1), ('devoile', 1), ('baisses', 1), ('dotations', 1), ('collect', 1), ('millionaire', 1), ('habitant', 1), ('rassure', 1), ('toyotamonde', 1), ('facil', 1), ('piscin', 1), ('yesterday', 1), ('hosted', 1), ('dinner', 1), ('with', 1), ('several', 1), ('companies', 1), ('courtoisie', 1), ('republicaine', 1), ('jaccueillais', 1), ('circo', 1), ('speculateurs', 1), ('confrontee', 1), ('hausse', 1), ('limmobilier', 1), ('flory', 1), ('onm', 1), ('newyork', 1), ('freecalling', 1), ('chargeyourphone', 1), ('jettent', 1), ('nourrir', 1), ('ogre', 1), ('privatise', 1), ('dedommagement', 1), ('incertitudes', 1), ('accroissement', 1), ('defaut', 1), ('candidatu', 1), ('16h30', 1), ('equip', 1), ('intel', 1), ('abat', 1), ('suivront', 1), ('pekin2022', 1), ('surtou', 1), ('lfi', 1), ('unes', 1), ('jjoo', 1), ('resseau', 1), ('ferree', 1), ('adoptez', 1), ('reponses', 1), ('questionnaire', 1), ('dirais', 1), ('prevenu', 1), ('decoupe', 1), ('eta', 1), ('8h', 1), ('34m', 1), ('6s', 1), ('pleiade', 1), ('primaire', 1), ('leognan', 1), ('testee', 1), ('deploiera', 1), ('alertons', 1), ('devastateur', 1), ('sncfordonnances', 1), ('evoquerez', 1), ('reserve', 1), ('fonciere', 1), ('cheminots', 1), ('syrie', 1), ('sylvievartan', 1), ('psgom', 1), ('ompsg', 1), ('4mp1ldm', 1), ('look', 1), ('reproduisez', 1), ('ambiances', 1), ('privee', 1), ('gant', 1), ('lights', 1), ('preserver', 1), ('eleveurs', 1), ('pse', 1), ('jentends', 1), ('vaultier', 1), ('ouvrage', 1), ('effectuent', 1), ('populatio', 1), ('terres', 1), ('agricoles', 1), ('beton', 1), ('consensus', 1), ('fraternite', 1), ('stupidite', 1), ('mepris', 1), ('banlieu', 1), ('interdi', 1), ('engendree', 1), ('dysfonctionnement', 1), ('structurel', 1), ('tomo', 1), ('inspirant', 1), ('viste', 1), ('lies', 1), ('ordonnance', 1), ('renoncement', 1), ('newsletter', 1), ('speciale', 1), ('inscri', 1), ('iront', 1), ('fichu', 1), ('gueules', 1), ('laid', 1), ('biodiversite', 1), ('rebelote', 1), ('classiquematin', 1), ('conduire', 1), ('reporter', 1), ('gr', 1), ('sentendre', 1), ('franc', 1), ('arrangements', 1), ('extinction', 1), ('isignyomaha', 1), ('coups', 1), ('venants', 1), ('sommet', 1), ('mention', 1), ('operationnelle', 1), ('pri', 1), ('agenda', 1), ('conseilregionalcentrevaldeloire', 1), ('regionale', 1), ('offensive', 1), ('volet', 1), ('admin', 1), ('infra', 1), ('complete', 1), ('indice', 1), ('rtlsoir', 1), ('logiq', 1), ('monfort', 1), ('patineuse', 1), ('reconvertie', 1), ('rigole', 1), ('duo', 1), ('termine', 1), ('5m93', 1), ('allstarperche', 1), ('quartus', 1), ('clermontferrand', 1), ('decideur', 1), ('rationnel', 1), ('telephones', 1), ('portables', 1), ('nouvelleaquitaine', 1), ('rends', 1), ('marine', 1), ('jul', 1), ('ceremo', 1), ('eteint', 1), ('fermerture', 1), ('inquieter', 1), ('boys', 1), ('bands', 1), ('alliage', 1), ('ceremoniedecloture', 1), ('exo', 1), ('inquiet', 1), ('avancement', 1), ('laicite', 1), ('abattage', 1), ('animaux', 1), ('instantanee', 1), ('absengorgement', 1), ('vif', 1), ('restera', 1), ('continent', 1), ('orig', 1), ('paradis', 1), ('publicite', 1), ('tait', 1), ('toyota', 1), ('handicapes', 1), ('educ', 1), ('rebondisses', 1), ('skieur', 1), ('battue', 1), ('puiseent', 1), ('vivem', 1), ('med', 1), ('bedier', 1), ('etudient', 1), ('deplacent', 1), ('fichent', 1), ('foutaise', 1), ('roi', 1), ('coince', 1), ('maribor', 1), ('slovenie', 1), ('adp', 1), ('aerogare', 1), ('ouvrira', 1), 
('emprunta', 1), ('gallica', 1), ('starting', 1), ('block', 1), ('illustrati', 1), ('finales', 1), ('meaux', 1), ('econom', 1), ('exonerer', 1), ('dinguerie', 1), ('poney', 1), ('voient', 1), ('multiplier', 1), ('sensible', 1), ('modification', 1), ('6000', 1), ('doses', 1), ('asthme', 1), ('emportees', 1), ('norvege', 1), ('38', 1), ('actuelles', 1), ('bailleurs', 1), ('hlm', 1), ('autorises', 1), ('acquerir', 1), ('destines', 1), ('boules', 1), ('universel', 1), ('raaa', 1), ('bets', 1), ('mom', 1), ('transmet', 1), ('informations', 1), ('donnees', 1), ('numeriques', 1), ('liees', 1), ('vertone', 1), ('ligne14', 1), ('ligne15', 1), ('repoussee', 1), ('oisienne', 1), ('reportee', 1), ('2027', 1), ('tendances', 1), ('rouler', 1), ('cantwait', 1), ('mentionnees', 1), ('confirmees', 1), ('troncons', 1), ('acceleres', 1), ('leconomie', 1), ('1vaste', 1), ('fumisterie', 1), ('payera', 1), ('rives', 1), ('utilisation', 1), ('menees', 1), ('faculte', 1), ('rodez', 1), ('etudia', 1), ('remy', 1), ('postes', 1), ('commercial', 1), ('lurbanisme', 1), ('democratiser', 1), ('1000', 1), ('signatures', 1), ('charniere', 1), ('souhaitons', 1), ('positionnement', 1), ('automatique', 1), ('regate', 1), ('senior', 1), ('bagnolet', 1), ('regrette', 1), ('quune', 1), ('javais', 1), ('evoquee', 1), ('exploree', 1), ('affaire', 1), ('interconnexions', 1), ('supermetro', 1), ('passes', 1), ('ous', 1), ('perdus', 1), ('retenu', 1), ('loffre', 1), ('allie', 1), ('kilian', 1), ('organiseront', 1), ('perennite', 1), ('frequentati', 1), ('evolue', 1), ('alfortville', 1), ('aff', 1), ('montparnasse', 1), ('persuade', 1), ('saura', 1), ('circonscription', 1), ('7801', 1), ('fondamental', 1), ('urbanism', 1), ('report', 1), ('jo2032', 1), ('arriere', 1), ('siecle', 1), ('technologique', 1), ('anouck', 1), ('jaubert', 1), ('descalade', 1), ('lescalade', 1), ('lesmureaux', 1), ('museification', 1), ('departements', 1), ('reforme', 1), ('collterr', 1), ('accellerateur', 1), ('lacademie', 1), ('papiers', 1), ('confbb', 1), ('impacter', 1), ('negativement', 1), ('conclue', 1), ('dsden', 1), ('72', 1), ('allemands', 1), ('rese', 1), ('promettre', 1), ('chant', 1), ('dernieres', 1), ('lefigaro', 1), ('irfo', 1), ('tribunal', 1), ('sanctionne', 1), ('incompetence', 1), ('dispositions', 1), ('simplifier', 1), ('dinfr', 1), ('bec', 1), ('ongles', 1), ('deuxieme', 1), ('championnats', 1), ('angers', 1), ('49', 1), ('205', 1), ('souterrain', 1), ('fonctionnels', 1), ('telespectateurs', 1), ('modernisee', 1), ('intelligentes', 1), ('marins', 1), ('d1f', 1), ('tumultueuse', 1), ('tonya', 1), ('harding', 1), ('integration', 1), ('offshore', 1), ('disneytousenforme', 1), ('lacombe', 1), ('couvrirait', 1), ('accorder', 1), ('grco', 1), ('sta', 1), ('immercurien', 1), ('canoekayak', 1), ('redit', 1), ('ret', 1), ('surcroit', 1), ('circ', 1), ('genie', 1), ('civil', 1), ('1pantheonsorbonne', 1), ('combine', 1), ('performance2024', 1), ('usepiades', 1), ('decoles', 1), ('bigdata', 1), ('allumera', 1), ('carabine', 1), ('fraioli', 1), ('bie', 1), ('profitons', 1), ('chaines', 1), ('tableau', 1), ('sevran', 1), ('ecoutant', 1), ('dinvitation', 1), ('regulierement', 1), ('echogeo', 1), ('hg', 1), ('geographie', 1), ('liera', 1), ('scientifique', 1), ('jedemande', 1), ('discuter', 1), ('deshabiller', 1), ('emouvante', 1), ('confirmer', 1), ('quautant', 1), ('masculin', 1), ('cible', 1), ('legrand', 1), ('xxl', 1), ('felicitions', 1), ('superb', 1), ('sponsoriser', 1), ('lescrimeuse', 1), ('laurence', 1), ('epee', 1), ('tit', 1), ('pouce', 1), 
('sacree', 1), ('2108', 1), ('juges', 1), ('mentent', 1), ('disent', 1), ('sable', 1), ('signez', 1), ('petitio', 1), ('cabochon', 1), ('creatifs', 1), ('europa', 1), ('joueur', 1), ('100e', 1), ('qualifs', 1), ('additionnels', 1), ('voyages', 1), ('hong', 1), ('kong', 1), ('dubai', 1), ('manilles', 1), ('prostates', 1), ('joclub', 1), ('gaule', 1), ('mont', 1), ('dopinion', 1), ('lan', 1), ('collegiens', 1), ('hockey', 1), ('faur', 1), ('qualifier', 1), ('dircab', 1), ('naturel', 1), ('roches', 1), ('diable', 1), ('animer', 1), ('ol', 1), ('souvre', 1), ('horizons', 1), ('bourgogne', 1), ('aveu', 1), ('faiblesse', 1), ('marseille2024', 1), ('planvoile', 1), ('initie', 1), ('rehabilite', 1), ('antidopage', 1), ('agrandissement', 1), ('prepatation', 1), ('ligues', 1), ('jusquau', 1), ('tentes', 1), ('files', 1), ('dattente', 1), ('laeroport', 1), ('cesser', 1), ('polluer', 1), ('hi', 1), ('lannee', 1), ('binom', 1), ('inbound', 1), ('oublions', 1), ('activations', 1), ('teasing', 1), ('arrivera', 1), ('lagence', 1), ('ubi', 1), ('rapportonesta', 1), ('bouge', 1), ('laffaire', 1), ('acteu', 1), ('goldenblocks', 1), ('demploi', 1), ('offi', 1), ('immersion', 1), ('impliquent', 1), ('responsabilite', 1), ('appropriation', 1), ('technologies', 1), ('conviendra', 1), ('perenniser', 1), ('hesite', 1), ('lhorizon', 1), ('contenu', 1), ('chiffredelasemaine', 1), ('europacity', 1), ('architectes', 1), ('alliages', 1), ('bjarkeingels', 1), ('encourageant', 1), ('colossal', 1), ('publi', 1), ('steph', 1), ('etape', 1), ('etatsgenerauxpolitiquedelaville', 1), ('rounds', 1), ('durs', 1), ('boxe', 1), ('fightspirit', 1), ('hackaton', 1), ('opendata', 1), ('selectionnee', 1), ('virginie', 1), ('infrastr', 1), ('souhaites', 1), ('acheves', 1), ('securise', 1), ('citedeleconomie', 1), ('dheloise', 1), ('fixer', 1), ('palier', 1), ('definitif', 1), ('franchir', 1), ('complementarite', 1), ('electri', 1), ('beneficie', 1), ('disposi', 1), ('insupportable', 1), ('neige', 1), ('verglas', 1), ('eau', 1), ('inzinzaclochrist', 1), ('competitio', 1), ('impa', 1), ('relance', 1), ('renforces', 1), ('panique', 1), ('acceler', 1), ('commentair', 1), ('detections', 1), ('relative', 1), ('attendait', 1), ('apprehension', 1), ('fermeture', 1), ('fragile', 1), ('grandpalais', 1), ('rencontres', 1), ('tenant', 1), ('der', 1), ('lacces', 1), ('laccompagnement', 1), ('internatio', 1), ('liaison180', 1), ('conges', 1), ('regaler', 1), ('saintvalentin2018', 1), ('googlealerts', 1), ('laeticia', 1), ('smet', 1), ('allument', 1), ('serviront', 1), ('remet', 1), ('lamottebeuvron', 1), ('ideale', 1), ('epreuvesequestres', 1), ('loiretcher', 1), ('annoncent', 1), ('renouveau', 1), ('mediatisation', 1), ('publiquement', 1), ('retransmettre', 1), ('antennes', 1), ('tf1', 1), ('retire', 1), ('dotation', 1), ('affecte', 1), ('accorhotels', 1), ('bouyg', 1), ('benevolat', 1), ('vante', 1), ('revers', 1), ('reluisant', 1), ('disposer', 1), ('lordre', 1), ('deur', 1), ('endirect', 1), ('ski', 1), ('reinventer', 1), ('ecomobilite', 1), ('couverture', 1), ('feminins', 1), ('lescrime', 1), ('lamelioration', 1), ('precieuse', 1), ('exposer', 1), ('societaux', 1), ('voitures', 1), ('200000km', 1), ('parcourus', 1), ('z', 1), ('destinees', 1), ('ceis', 1), ('cybersecurite', 1), ('ev', 1), ('diffuseurs', 1), ('alliant', 1), ('oubliais', 1), ('trappes', 1), ('elancourt', 1), ('airbus', 1), ('trafique', 1), ('cac40', 1), ('fuel', 1), ('lourd', 1), ('descendants', 1), ('forcement', 1), ('opposants', 1), ('debuts', 1), ('annecy2018', 1), ('brise', 
1), ('graal', 1), ('rate', 1), ('cnp', 1), ('biathlon', 1), ('hopital', 1), ('censee', 1), ('asiatique', 1), ('vivez', 1), ('faque', 1), ('bejing2022', 1), ('trouvee', 1), ('agrandir', 1), ('cosma', 1), ('arcuei', 1), ('merites', 1), ('sympathie', 1), ('federer', 1), ('hs', 1), ('n15', 1), ('532', 1), ('expulses', 1), ('uber', 1), ('al', 1), ('inde', 1), ('plupart', 1), ('fermee', 1), ('courant', 1), ('mobility', 1), ('recomm', 1), ('environ', 1), ('700000', 1), ('croissant', 1), ('pouvons', 1), ('federatrice', 1), ('synthetisant', 1), ('modernite', 1), ('02', 1), ('23h', 1), ('dense', 1), ('pyongyang2018', 1), ('francophonie', 1), ('envisager', 1), ('collab', 1), ('lelite', 1), ('concentrer', 1), ('lapres', 1), ('realise', 1), ('publiera', 1), ('sop2018', 1), ('jeremiasz', 1), ('colomb', 1), ('amerique', 1), ('bateau', 1), ('derogatoire', 1), ('relatifs', 1), ('superflu', 1), ('pieces', 1), ('degueu', 1), ('villepoubelle', 1), ('selectionne', 1), ('boost', 1), ('coupedumondefeminine2019', 1), ('coupedumonderugby2023', 1), ('sensibilises', 1), ('annoncerons', 1), ('oly', 1), ('legalisons', 1), ('privatisations', 1), ('temporaires', 1), ('europeen', 1), ('realite', 1), ('competence', 1), ('creen', 1), ('expropriations', 1), ('contraignante', 1), ('adj', 1), ('martinique', 1), ('nilor', 1), ('envisagerait', 1), ('ahahahaha', 1), ('navigo', 1), ('augmentera', 1), ('devenue', 1), ('even', 1), ('rappelons', 1), ('dispose', 1), ('ra', 1), ('procedures', 1), ('credit', 1), ('ad', 1), ('hoc', 1), ('destinee', 1), ('etoffe', 1), ('laiss', 1), ('j1', 1), ('vichy', 1), ('honore', 1), ('guillaume', 1), ('lecuivre', 1), ('igen', 1), ('annoncera', 1), ('educa', 1), ('legoff', 1), ('dexception', 1), ('exc', 1), ('leurope', 1), ('passent', 1), ('conte', 1), ('supplement', 1), ('handis', 1), ('severe', 1), ('mediocrite', 1), ('icdnews', 1), ('levenementasso', 1), ('carettej', 1), ('croitre', 1), ('whatelse', 1), ('originalite', 1), ('reside', 1), ('lidee', 1), ('1992', 1), ('dotee', 1), ('specialistes', 1), ('aguerris', 1), ('monvote', 1), ('habiter', 1), ('plsu', 1), ('curieux', 1), ('laporte', 1), ('jeff', 1), ('koons', 1), ('suivra', 1), ('aveuglement', 1), ('seigneurs', 1), ('dieux', 1), ('gagnons', 1), ('industrie', 1), ('viable', 1), ('planter', 1), ('couillons', 1), ('arle', 1), ('6eme', 1), ('lenfance', 1), ('playithuman', 1), ('reconnaiss', 1), ('aimes', 1), ('komox', 1), ('associee', 1), ('coupedumonde2023', 1), ('essonne', 1), ('oportunite', 1), ('yeesss', 1), ('xvde', 1), ('promouvant', 1), ('bonnnn', 1), ('souhaitant', 1), ('pensent', 1), ('lentra', 1), ('verdict', 1), ('crossunss', 1), ('vesoul', 1), ('1100', 1), ('parents', 1), ('etabliss', 1), ('dignement', 1), ('lelan', 1), ('susc', 1), ('profe', 1), ('retarde', 1), ('omnibus', 1), ('massy', 1), ('trajet', 1), ('sceptique', 1), ('soft', 1), ('power', 1), ('worldcup2023', 1), ('eclat', 1), ('billet', 1), ('winner', 1), ('avan', 1), ('mecanicien', 1), ('perl', 1), ('journaliste', 1), ('incontournable', 1), ('vieillir', 1), ('automne', 1), ('capharnaum', 1), ('annus', 1), ('horribilis', 1), ('cdmhand2017', 1), ('cdm2019', 1), ('afondlaforme', 1), ('presentent', 1), ('cestlahontequand', 1), ('serre', 1), ('ceinture', 1), ('securitesociale', 1), ('feminisation', 1), ('gaygames2018', 1), ('fifawwc2019', 1), ('raler', 1), ('doublette', 1), ('22m', 1), ('coupedumonde2018', 1), ('coupedumonde2019', 1), ('lacroissance', 1), ('conjuguer', 1), ('caisse', 1), ('vides', 1), ('banquier', 1), ('achat', 1), ('monnaie', 1), ('periode', 1), ('entiere', 1), 
('ramene', 1), ('voudrais', 1), ('tricherie', 1), ('exempte', 1), ('appro', 1), ('marketin', 1), ('rugbyworldcup', 1), ('arrete', 1), ('toujoursplus', 1), ('combo', 1), ('sucre', 1), ('altitude', 1), ('designer', 1), ('mandataire', 1), ('liquidateur', 1), ('rwc', 1), ('demenager', 1), ('10eme', 1), ('waow', 1), ('chargees', 1), ('eurobasket2015', 1), ('vla', 1), ('cdm2023', 1), ('structurant', 1), ('defense', 1), ('retournement', 1), ('pates', 1), ('magiques', 1), ('wrc2023', 1), ('wc2019', 1), ('attribues', 1), ('hand2017', 1), ('word', 1), ('afrique', 1), ('fi', 1), ('rabault', 1), ('garanti', 1), ('wtf', 1), ('contrib', 1), ('intervalle', 1), ('lycees', 1), ('dm', 1), ('jugez', 1), ('tdtc', 1), ('reversibles', 1), ('consequences', 1), ('mustread', 1), ('worldwide', 1), ('quantite', 1), ('qualitative', 1), ('vice', 1), ('prsdt', 1), ('marcher', 1), ('linnovation', 1), ('versions', 1), ('logos', 1), ('adapter', 1), ('denismasseglia', 1), ('memoire', 1), ('65mds', 1), ('vale', 1), ('stations', 1), ('changez', 1), ('ligne8', 1), ('radio', 1), ('marchalnguyen', 1), ('doue', 1), ('moidevant', 1), ('ifsderriere', 1), ('1985', 1), ('faceties', 1), ('patisserie', 1), ('lmp', 1), ('lemeilleurpatissier', 1), ('adjoints', 1), ('creneaux', 1), ('comites', 1), ('construit', 1), ('discussions', 1), ('apparaitre', 1), ('clefs', 1), ('minute', 1), ('consideration', 1), ('marqueurs', 1), ('echangent', 1), ('reus', 1), ('bdnumerique', 1), ('devoilant', 1), ('pcq', 1), ('decouvertedesoi', 1), ('mang', 1), ('mondialisation', 1), ('emigration', 1), ('judokas', 1), ('judo', 1), ('acteurdedemain', 1), ('industrielle', 1), ('memorable', 1), ('contri', 1), ('strangersthings2', 1), ('concevons', 1), ('inedite', 1), ('martiens', 1), ('representent', 1), ('vraies', 1), ('excuses', 1), ('designsprint', 1), ('futuriste', 1), ('reveiller', 1), ('anati', 1), ('sprinters', 1), ('inventons', 1), ('mark', 1), ('tram', 1), ('fonctionnaires', 1), ('feram', 1), ('latin', 1), ('checker', 1), ('parlaient', 1), ('remboursez', 1), ('amas', 1), ('immondices', 1), ('strasbourg', 1), ('baignable', 1), ('facile', 1), ('abonnes', 1), ('signalisation', 1), ('prepares', 1), ('reconvers', 1), ('cathedrales', 1), ('demandez', 1), ('fictif', 1), ('concerme', 1), ('encouragements', 1), ('aborde', 1), ('teddydecima', 1), ('legende', 1), ('10x', 1), ('2x', 1), ('accueilli', 1), ('3mois', 1), ('regeneration', 1), ('physique', 1), ('echeances', 1), ('apollon', 1), ('lair', 1), ('guignols', 1), ('marchandises', 1), ('dirigents', 1), ('sourds', 1), ('aveugles', 1), ('quittons', 1), ('sature', 1), ('234', 1), ('colloquefiphfp', 1), ('marianne', 1), ('louradour', 1), ('seeph2017', 1), ('piloter', 1), ('communication', 1), ('mystere', 1), ('emmanuelle', 1), ('allumer', 1), ('1984', 1), ('2003', 1), ('jackralite', 1), ('decede', 1), ('togolais', 1), ('regnier', 1), ('jr', 1), ('certifier', 1), ('lecons', 1), ('dessinent', 1), ('fluidifier', 1), ('78000', 1), ('nopainnogain', 1), ('nevergiveup', 1), ('exigee', 1)]\n" ], [ "chars = sorted(list(set(tweets_str)))\nchar_to_int = dict((c, i) for i, c in enumerate(chars))\nprint(chars)\nlen(char_to_int)", "[' ', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']\n" ], [ "n_chars = len(tweets_str.split(sep=' '))\nn_vocab = len(chars)\nprint(n_chars)\nprint(n_vocab)", "54098\n37\n" ], [ "seq_length = 100\ndataX = []\ndataY = []\nfor i in range(0, n_chars - seq_length, 1):\n seq_in 
= tweets_str[i:i + seq_length]\n seq_out = tweets_str[i + seq_length]\n dataX.append([char_to_int[char] for char in seq_in])\n dataY.append(char_to_int[seq_out])\nn_patterns = len(dataX)\nprint(n_patterns)\n\n", "53998\n" ], [ "# reshape X to be [samples, time steps, features]\nX = np.reshape(dataX, (n_patterns, seq_length, 1))\n# normalize\nX = X / float(n_vocab)\n# one hot encode the output variable\ny = np_utils.to_categorical(dataY)", "_____no_output_____" ], [ "model = Sequential()\nmodel.add(LSTM(256, input_shape=(X.shape[1], X.shape[2]), return_sequences=True))\nmodel.add(Dropout(0.2))\nmodel.add(LSTM(256))\nmodel.add(Dropout(0.2))\nmodel.add(Dense(y.shape[1], activation='softmax'))\nmodel.compile(loss='categorical_crossentropy', optimizer='adam')", "_____no_output_____" ], [ "filepath=\"/content/gdrive/My Drive/weights-improvement-{epoch:02d}-{loss:.4f}-bigger3.hdf5\"\ncheckpoint = ModelCheckpoint(filepath, monitor='loss', verbose=1, save_best_only=True, mode='min')\ncallbacks_list = [checkpoint]", "_____no_output_____" ], [ "model.fit(X, y, epochs=30, batch_size=128, callbacks=callbacks_list)", "Epoch 1/30\n53998/53998 [==============================] - 204s 4ms/step - loss: 2.8107\n\nEpoch 00001: loss improved from inf to 2.81069, saving model to /content/gdrive/My Drive/weights-improvement-01-2.8107-bigger3.hdf5\nEpoch 2/30\n53998/53998 [==============================] - 203s 4ms/step - loss: 2.6043\n\nEpoch 00002: loss improved from 2.81069 to 2.60432, saving model to /content/gdrive/My Drive/weights-improvement-02-2.6043-bigger3.hdf5\nEpoch 3/30\n53998/53998 [==============================] - 203s 4ms/step - loss: 2.4907\n\nEpoch 00003: loss improved from 2.60432 to 2.49072, saving model to /content/gdrive/My Drive/weights-improvement-03-2.4907-bigger3.hdf5\nEpoch 4/30\n53998/53998 [==============================] - 203s 4ms/step - loss: 2.3632\n\nEpoch 00004: loss improved from 2.49072 to 2.36319, saving model to /content/gdrive/My Drive/weights-improvement-04-2.3632-bigger3.hdf5\nEpoch 5/30\n53998/53998 [==============================] - 202s 4ms/step - loss: 2.2129\n\nEpoch 00005: loss improved from 2.36319 to 2.21293, saving model to /content/gdrive/My Drive/weights-improvement-05-2.2129-bigger3.hdf5\nEpoch 6/30\n53998/53998 [==============================] - 202s 4ms/step - loss: 2.0550\n\nEpoch 00006: loss improved from 2.21293 to 2.05498, saving model to /content/gdrive/My Drive/weights-improvement-06-2.0550-bigger3.hdf5\nEpoch 7/30\n53998/53998 [==============================] - 203s 4ms/step - loss: 1.9227\n\nEpoch 00007: loss improved from 2.05498 to 1.92267, saving model to /content/gdrive/My Drive/weights-improvement-07-1.9227-bigger3.hdf5\nEpoch 8/30\n53998/53998 [==============================] - 203s 4ms/step - loss: 1.8201\n\nEpoch 00008: loss improved from 1.92267 to 1.82014, saving model to /content/gdrive/My Drive/weights-improvement-08-1.8201-bigger3.hdf5\nEpoch 9/30\n53998/53998 [==============================] - 203s 4ms/step - loss: 1.7393\n\nEpoch 00009: loss improved from 1.82014 to 1.73931, saving model to /content/gdrive/My Drive/weights-improvement-09-1.7393-bigger3.hdf5\nEpoch 10/30\n53998/53998 [==============================] - 203s 4ms/step - loss: 1.6731\n\nEpoch 00010: loss improved from 1.73931 to 1.67307, saving model to /content/gdrive/My Drive/weights-improvement-10-1.6731-bigger3.hdf5\nEpoch 11/30\n53998/53998 [==============================] - 202s 4ms/step - loss: 1.6141\n\nEpoch 00011: loss improved from 1.67307 to 1.61408, saving 
model to /content/gdrive/My Drive/weights-improvement-11-1.6141-bigger3.hdf5\nEpoch 12/30\n53998/53998 [==============================] - 202s 4ms/step - loss: 1.5576\n\nEpoch 00012: loss improved from 1.61408 to 1.55756, saving model to /content/gdrive/My Drive/weights-improvement-12-1.5576-bigger3.hdf5\nEpoch 13/30\n53998/53998 [==============================] - 202s 4ms/step - loss: 1.5063\n\nEpoch 00013: loss improved from 1.55756 to 1.50633, saving model to /content/gdrive/My Drive/weights-improvement-13-1.5063-bigger3.hdf5\nEpoch 14/30\n53998/53998 [==============================] - 202s 4ms/step - loss: 1.4546\n\nEpoch 00014: loss improved from 1.50633 to 1.45462, saving model to /content/gdrive/My Drive/weights-improvement-14-1.4546-bigger3.hdf5\nEpoch 15/30\n53998/53998 [==============================] - 202s 4ms/step - loss: 1.4017\n\nEpoch 00015: loss improved from 1.45462 to 1.40169, saving model to /content/gdrive/My Drive/weights-improvement-15-1.4017-bigger3.hdf5\nEpoch 16/30\n53998/53998 [==============================] - 202s 4ms/step - loss: 1.3649\n\nEpoch 00016: loss improved from 1.40169 to 1.36492, saving model to /content/gdrive/My Drive/weights-improvement-16-1.3649-bigger3.hdf5\nEpoch 17/30\n53998/53998 [==============================] - 202s 4ms/step - loss: 1.3141\n\nEpoch 00017: loss improved from 1.36492 to 1.31409, saving model to /content/gdrive/My Drive/weights-improvement-17-1.3141-bigger3.hdf5\nEpoch 18/30\n53998/53998 [==============================] - 202s 4ms/step - loss: 1.2741\n\nEpoch 00018: loss improved from 1.31409 to 1.27409, saving model to /content/gdrive/My Drive/weights-improvement-18-1.2741-bigger3.hdf5\nEpoch 19/30\n53998/53998 [==============================] - 201s 4ms/step - loss: 1.2337\n\nEpoch 00019: loss improved from 1.27409 to 1.23369, saving model to /content/gdrive/My Drive/weights-improvement-19-1.2337-bigger3.hdf5\nEpoch 20/30\n53998/53998 [==============================] - 201s 4ms/step - loss: 1.2017\n\nEpoch 00020: loss improved from 1.23369 to 1.20174, saving model to /content/gdrive/My Drive/weights-improvement-20-1.2017-bigger3.hdf5\nEpoch 21/30\n53998/53998 [==============================] - 201s 4ms/step - loss: 1.1651\n\nEpoch 00021: loss improved from 1.20174 to 1.16509, saving model to /content/gdrive/My Drive/weights-improvement-21-1.1651-bigger3.hdf5\nEpoch 22/30\n53998/53998 [==============================] - 200s 4ms/step - loss: 1.1287\n\nEpoch 00022: loss improved from 1.16509 to 1.12872, saving model to /content/gdrive/My Drive/weights-improvement-22-1.1287-bigger3.hdf5\nEpoch 23/30\n53998/53998 [==============================] - 199s 4ms/step - loss: 1.0936\n\nEpoch 00023: loss improved from 1.12872 to 1.09359, saving model to /content/gdrive/My Drive/weights-improvement-23-1.0936-bigger3.hdf5\nEpoch 24/30\n53998/53998 [==============================] - 201s 4ms/step - loss: 1.0606\n\nEpoch 00024: loss improved from 1.09359 to 1.06062, saving model to /content/gdrive/My Drive/weights-improvement-24-1.0606-bigger3.hdf5\nEpoch 25/30\n53998/53998 [==============================] - 200s 4ms/step - loss: 1.0320\n\nEpoch 00025: loss improved from 1.06062 to 1.03195, saving model to /content/gdrive/My Drive/weights-improvement-25-1.0320-bigger3.hdf5\nEpoch 26/30\n53998/53998 [==============================] - 201s 4ms/step - loss: 1.0012\n\nEpoch 00026: loss improved from 1.03195 to 1.00123, saving model to /content/gdrive/My Drive/weights-improvement-26-1.0012-bigger3.hdf5\nEpoch 27/30\n53998/53998 
[==============================] - 201s 4ms/step - loss: 0.9779\n\nEpoch 00027: loss improved from 1.00123 to 0.97792, saving model to /content/gdrive/My Drive/weights-improvement-27-0.9779-bigger3.hdf5\nEpoch 28/30\n53998/53998 [==============================] - 201s 4ms/step - loss: 0.9507\n\nEpoch 00028: loss improved from 0.97792 to 0.95068, saving model to /content/gdrive/My Drive/weights-improvement-28-0.9507-bigger3.hdf5\nEpoch 29/30\n53998/53998 [==============================] - 202s 4ms/step - loss: 0.9234\n\nEpoch 00029: loss improved from 0.95068 to 0.92343, saving model to /content/gdrive/My Drive/weights-improvement-29-0.9234-bigger3.hdf5\nEpoch 30/30\n53998/53998 [==============================] - 201s 4ms/step - loss: 0.8976\n\nEpoch 00030: loss improved from 0.92343 to 0.89765, saving model to /content/gdrive/My Drive/weights-improvement-30-0.8976-bigger3.hdf5\n" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cbfffdaf4744a63127a5aa916424464c7b60aeaf
7,773
ipynb
Jupyter Notebook
oops.ipynb
jayaramoruganti777/fcspy
140d058b03f7bffd3e8aa3f4fa56877b92cda7e8
[ "MIT" ]
null
null
null
oops.ipynb
jayaramoruganti777/fcspy
140d058b03f7bffd3e8aa3f4fa56877b92cda7e8
[ "MIT" ]
null
null
null
oops.ipynb
jayaramoruganti777/fcspy
140d058b03f7bffd3e8aa3f4fa56877b92cda7e8
[ "MIT" ]
null
null
null
17.787185
66
0.43561
[ [ [ "\n# oops", "_____no_output_____" ] ], [ [ "lst = [1,2,3,4]", "_____no_output_____" ], [ "lst.count(3)", "_____no_output_____" ], [ "print(type([]))\nprint(type(1))\nprint(type(True))\nprint(type(\"jay\"))\nprint(type(()))\nprint(type({}))\nprint(type({'a','b'}))", "<class 'list'>\n<class 'int'>\n<class 'bool'>\n<class 'str'>\n<class 'tuple'>\n<class 'dict'>\n<class 'set'>\n" ], [ "#creating our class dummmy\n\nclass sampledample:\n pass\n\nx = sampledample()\nprint(type(x))", "<class '__main__.sampledample'>\n" ], [ "class Tiger:\n def __init__(self,breed):\n self.breed = breed\n\nsimba = Tiger(\"Bengal Tiger\")\nPK = Tiger(\"White Tiger\")", "_____no_output_____" ], [ "simba.breed", "_____no_output_____" ], [ "PK.breed", "_____no_output_____" ], [ "class Tiger:\n def __init__(self,breed,name):\n self.breed = breed\n self.name = name\n\nsimba = Tiger(\"Bengal Tiger\",\"simba\")\nPK = Tiger(\"White Tiger\",\"PK\")", "_____no_output_____" ], [ "simba.breed", "_____no_output_____" ], [ "PK.breed", "_____no_output_____" ], [ "print(simba.breed)\nprint(simba.name)", "Bengal Tiger\nsimba\n" ] ], [ [ "# methods in class", "_____no_output_____" ] ], [ [ "class Circle:\n \n Pi = 3.14159\n \n def __init__(self,radius=1):\n self.radius = radius\n self.area = radius * radius * Circle.Pi\n \n #method for getting cicumfference\n \n def getCircumference(self):\n return 2*self.Pi*self.radius\n \n def setRadius(self,new_radius):\n self.radius = new_radius\n self.area = new_radius * new_radius * self.Pi\n \njay = Circle()", "_____no_output_____" ], [ "jay.area", "_____no_output_____" ], [ "print(jay.area)\nprint(jay.radius)\nprint(jay.getCircumference())\nprint(jay.setRadius(6))\nprint(jay.radius)\nprint(jay.radius)\nprint(jay.getCircumference())", "3.14159\n1\n6.28318\nNone\n6\n6\n37.699079999999995\n" ] ], [ [ "# inheritance", "_____no_output_____" ] ], [ [ "class cat:\n def __init__(self,name):\n self.name = name\n print(self.name + \" cat got created\")\n def talk(self):\n return self.name + ' says meeeowww'\n \n \nclass minu(cat):\n def __init__(self):\n cat.__init__(self,'minu')\n print(\"minu got created\")\n \n def drink(self):\n print(\"drinks milk !\")", "_____no_output_____" ], [ "m = minu()", "minu cat got created\nminu got created\n" ], [ "m.talk ()", "_____no_output_____" ], [ "m.drink()", "drinks milk !\n" ], [ "m.name", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ] ]
cbffff4ca2bab4021cc0a473c5367028da51e5c9
33,053
ipynb
Jupyter Notebook
Python_IE411/hw7.ipynb
Rothdyt/codes-for-courses
a2dfea516ebc7cabef31a5169533b6da352e7ccb
[ "MIT" ]
4
2018-09-23T00:00:13.000Z
2018-11-02T22:56:35.000Z
Python_IE411/hw7.ipynb
Rothdyt/codes-for-courses
a2dfea516ebc7cabef31a5169533b6da352e7ccb
[ "MIT" ]
null
null
null
Python_IE411/hw7.ipynb
Rothdyt/codes-for-courses
a2dfea516ebc7cabef31a5169533b6da352e7ccb
[ "MIT" ]
null
null
null
61.55121
14,206
0.630714
[ [ [ "<div align=\"center\">\n <h1>Homework 7</h1>\n <p>\n <div align=\"center\">\n <h2>Yutong Dai [email protected]</h2>\n </div>\n </p>\n</div>", "_____no_output_____" ], [ "## 6.33\n\n\nThe dual problem is \n$$\n\\begin{align}\n& \\min \\quad 3 w_1 + 6 w_2\\\\\n& s.t \\quad w_1 + 2w_2 \\geq 2\\\\\n& \\qquad w_1 + 3w_2 \\geq -3\\\\\n& \\qquad w_1\\leq 0,w_2\\geq 0\n\\end{align}\n$$\n\nIt's easy to verify $(w_1^*,w_2^*)=(\\frac{11}{-2}, \\frac{5}{2})$ is a feasible solution to the dual and satisfy the KKT condition. Therefore, $(x_1^*,x_2^*)=(3/2, 3/2)$ is the optimal solution to the dual.\n\n---\n\n* The first method is \"Big-M\" method. First convert the problem to the standard form and adding the artificial variables, where they serve as the initial basis. \n\n* The second method is the artificial constraints technique, where we adding a upper bound on the summation of all non-basic variables.\n\nI will use the second method.\n\nThe tableau for the primal is as follow, where the dual is not feasible.\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 2 | -3 | 0 | 0 | 0 |\n| $x_3$ | 0 | -1 | -1 | 1 | 0 | -3 |\n| $x_4$ | 0 | 3 | 1 | 0 | 1 | 6 |\n\nAdding constrain $x_1 + x_2 \\leq M$, we have the following tableau.\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 2 | -3 | 0 | 0 | 0 | 0 |\n| $x_5$ | 0 | 1 | 1 | 0 | 0 | 1 | M |\n| $x_3$ | 0 | -1 | -1 | 1 | 0 | 0 | -3 |\n| $x_4$ | 0 | 3 | 1 | 0 | 1 | 0 | 6 |\n\n* The first iteration:\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | -5 | 0 | 0 | -2 | -2M |\n| $x_1$ | 0 | 1 | 1 | 0 | 0 | 1 | M |\n| $x_3$ | 0 | 0 | 0 | 1 | 0 | 1 | -3 + M |\n| $x_4$ | 0 | 0 | -2 | 0 | 1 | -3 | 6 -3M |\n\n* The second iteration:\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | -11/3 | 0 | -2/3 | 0 | -4 |\n| $x_1$ | 0 | 1 | 1/3 | 0 | 1/3 | 0 | 2 |\n| $x_3$ | 0 | 0 | -2/3 | 1 | 1/3 | 0 | -1 |\n| $x_5$ | 0 | 0 | 2/3 | 0 | -1/3 | 1 | M-2 |\n\n* The third iteration:\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | 0 | -11/2 | -5/2 | 0 | 3/2 |\n| $x_1$ | 0 | 1 | 0 | 1/2 | 1/2 | 0 | 3/2 |\n| $x_2$ | 0 | 0 | 1 | -3/2 | -1/2 | 0 | 3/2 |\n| $x_4$ | 0 | 0 | 0 | 1 | 0 | 1 | M-2 |\n\nSo the optimal solution for the primal is $(3/2, 3/2)$.", "_____no_output_____" ], [ "## 6.54\n\n**a)**\n\nThe dual problem is \n$$\n\\begin{align}\n& \\min \\quad 8w_1 + 4w_2\\\\\n& s.t \\quad w_1 - w_2 \\geq 2\\\\\n& \\qquad 2w_1 - w_2 \\geq 1\\\\\n& \\qquad 3w_1 - 2w_2 \\geq -1\\\\\n& \\qquad w_1\\leq 0,w_2\\geq 0\n\\end{align}\n$$\n\nSince the constraints in the primal are of $\\leq$ type, we know that the optimal solution for the dual is $(2,0)$ .", "_____no_output_____" ], [ "**b)**\n\nNote $x_2$ is a non-basic feasible solution and $c_2' - z_2=1>0$, therefore $x_2$ will enter the basis and change the optimal solution. 
\n\nThe tableau becomes\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | 1 | -3 | -2 | 0 | -16 |\n| $x_1$ | 0 | 1 | 2 | 1 | 1 | 0 | 8 |\n| $x_5$ | 0 | 0 | 3 | -1 | 1 | 1 | 12 |\n\nAfter one iteration, we reach the optimal tableau.\n\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | 0 | -3/2 | -3/2 | 0 | -20 |\n| $x_2$ | 0 | 1/2 | 0 | 1/2 | 1/2 | 0 | 4 |\n| $x_5$ | 0 | -1/2 | 0 | -3/2 | 1/2 | 1 | 0 |\n\nThe new optimal solution becomes $(x_1, x_2, x_3)=(0,4,0)$", "_____no_output_____" ], [ "**c)**\n\nNote $x_2$ is a non-basic feasible solution and $c_2 - c_B^TB^{-1}A_j'=1-1/3=2/3>0$, therefore $x_2$ will enter the basis and change the optimal solution. \n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | 2/3 | -3 | -2 | 0 | -16 |\n| $x_1$ | 0 | 1 | 1/6 | 1 | 1 | 0 | 8 |\n| $x_5$ | 0 | 0 | 7/6 | -1 | 1 | 1 | 12 |\n\nAfter one iteration, we reach the optimal tableau.\n\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | 0 | -17/7 | -18/7 | -4/7 | -28 |\n| $x_1$ | 0 | 1 | 0 | 8/7 | 6/7 | -1/7 | 44/7 |\n| $x_2$ | 0 | 0 | 1 | -6/7 | 1/7 | 6/7 | 72/7 |\n\nThe new optimal solution becomes $(x_1, x_2, x_3)=(44/7,72/7,0)$", "_____no_output_____" ], [ "**d)**\n\nSet up the tableau as\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | $x_6$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | -3 | -3 | -2 | 0 | 0 | -16 |\n| $M$ | -1 | 0 | 0 | 0 | 0 | 0 | -1 | 0 |\n| $x_1$ | 0 | 1 | 2 | 1 | 1 | 0 | 0 | 8 |\n| $x_5$ | 0 | 0 | 3 | -1 | 1 | 1 | 0 | 12 |\n| $x_6$ | 0 | 0 | 1 | 2 | 0 | 0 | 1 | 3 |\n\nand make $x_6$ as true basic variable by adding the last row to the zero row. We obtain\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | $x_6$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | -3 | -3 | -2 | 0 | 0 | -16 |\n| $M$ | -1 | 1 | 2 | 0 | 0 | 0 | 0 | 3 |\n| $x_1$ | 0 | 1 | 2 | 1 | 1 | 0 | 0 | 8 |\n| $x_5$ | 0 | 0 | 3 | -1 | 1 | 1 | 0 | 12 |\n| $x_6$ | 0 | 0 | 1 | 2 | 0 | 0 | 1 | 3 |\n\nAfter one iteration,\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | $x_6$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | -3/2 | 0 | -2 | 0 | 3/2 | -23/2 |\n| $M$ | -1 | 0 | 0 | 0 | 0 | 0 | -1 | 0 |\n| $x_1$ | 0 | 1 | 3/2 | 0 | 1 | 0 | -1/2 | 13/2 |\n| $x_5$ | 0 | 0 | 7/2 | 0 | 1 | 1 | 1/2 | 27/2 |\n| $x_6$ | 0 | 0 | 1/2 | 1 | 0 | 0 | 1/2 | 3/2 |\n\n\nwe reach the optimal. The new optimal solution becomes $(x_1, x_2, x_3)=(13/2, 0, 3/2)$", "_____no_output_____" ], [ "**e)**\n\nSuppose the new right-hand-side is $b'$. Then $B^{-1}b'=(b_1' , b_1'+ b_2')^T$. As we will increase 8 or 4 to $b_1'$ or $b_2'$. Either way will ensure $B^{-1}b'\\geq 0$, therefore, the same basis is still optimal.\n\n- If we change $b_1$ then, we will change the optimal solution from $(b_1,0,0)$ to $(b_1',0,0)$. 
It will increase the objective value by $2(b_1' -b_1)$\n\n- If we change $b_2$ then, we won't change the optimal solution $(b_1,0,0)$, hence the objective value.", "_____no_output_____" ], [ "**f)**\nAs $c_6 -x_6=6-wA_6=2>0$, $x_6$ will enter the basis.\n\nThe initial tableau is \n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | $x_6$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | -3 | -3 | -2 | 0 | 2 | -16 |\n| $x_1$ | 0 | 1 | 2 | 1 | 1 | 0 | 2 | 8 |\n| $x_5$ | 0 | 0 | 3 | -1 | 1 | 1 | 3 | 12 |\n\nAfter one iteration, the tableau becomes\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | $x_6$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | -1 | -5 | -4 | -3 | 0 | 0 | -24 |\n| $x_6$ | 0 | 1/2 | 1 | 1/2 | 1/2 | 0 | 1 | 4 |\n| $x_5$ | 0 | -3/2 | 0 | -5/2 | -1/2 | 1 | 0 | 0 |\n\nThe optimal solution is $(x_1, x_2, x_3,x_6)=(0,0,0,4)$\n", "_____no_output_____" ], [ "## 6.68\n\nBefore we proceed, we need to calculate a few quantity:\n\n * $(c_6,c_7,c_8)-(c_1,c_2,c_3)B^{-1}A_{[:,(6,7,8)]} = (\\bar c_6,\\bar c_7, \\bar c_8)=(-2,-1/10,-2) \\Rightarrow (c_1,c_2,c_3)=(2,4,1)$, where $A_{[:,(6,7,8)]}$ is $I_3$.\n * $(c_4,c_5) - (c_1,c_2,c_3)B^{-1}A_{[:,(3,4)]}=(\\bar c_4,\\bar c_5)=(-2,0)\\Rightarrow (c_4,c_5)=(3,2)$\n * $b=B\\bar b=(14/9, 110/3, 46/9)^T$", "_____no_output_____" ], [ "We perturbe the $b$ along the direction $d=(-1,0,0)^T$.\n\n**Iteration 1:**\n\n* Calculate $B^{-1}d = (-0.5, 1 , -5)^T$, So $S=\\{1,3\\}$.\n* Calculate the minimal ration $\\theta=7/5$.\n* If $\\theta\\in [0,7/5]$, the current basis $(A_1,A_2,A_3)$ is always optimal. Further, the objective value and right hand side will be\n\n$$\nz(\\theta) = 17 - 2\\theta \\qquad \\bar b = (3-\\frac{1}{2}\\theta, 1 + \\theta, 7-5\\theta)^T.\n$$\n\n* When $\\theta =7/5$, then $x_3=0$, therefore we perform dual simplex method on the tableau below.\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | $x_6$ | $x_7$ | $x_8$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | 0 | 0 | -2 | 0 | -2 | -1/10 | -2 | -71/5 |\n| $x_1$ | 0 | 1 | 0 | 0 | -1 | 0 | 1/2 | 1/5 | -1 | 23/10 |\n| $x_2$ | 0 | 0 | 1 | 0 | 2 | 1 | -1 | 0 | 1/2 | 12/5 |\n| $x_3$ | 0 | 0 | 0 | 1 | -1 | -2 | 5 | -3/10 | 2 | 0 |\n\nSo $x_3$ will leave and $x_5$ will enter.\n\nThe tableau becomes\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | $x_6$ | $x_7$ | $x_8$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | 0 | 0 | -2 | 0 | -2 | -1/10 | -2 | -71/5 |\n| $x_1$ | 0 | 1 | 0 | 0 | -1 | 0 | 1/2 | 1/5 | -1 | 23/10 |\n| $x_2$ | 0 | 0 | 1 | 1/2 | 3/2 | 0 | 3/2 | -3/20 | 3/2 | 12/5 |\n| $x_5$ | 0 | 0 | 0 | -1/2 | 1/2 | 1 | -5/2 | 3/20 | -1 | 0 |\n", "_____no_output_____" ], [ "**Iteration 2:**\n\n* Calculate $B^{-1}d = (-0.5, -1.5 , 2.5)^T$, $B^{-1}b=(3,4.5, -3.5)$So $S=\\{1,2\\}$.\n* Calculate the minimal ration $\\theta=3$.\n* If $\\theta\\in [7/5, 3]$, the current basis $(A_1,A_2,A_5)$ is always optimal. 
Further, the objective value and right hand side will be\n\n$$\nz(\\theta) = 17 - 2\\theta \\qquad \\bar b = (3-\\frac{1}{2}\\theta, \\frac{9}{2} - \\frac{3}{2} \\theta, \\frac{-7}{2}+\\frac{5}{2}\\theta)^T.\n$$\n\n* When $\\theta =3$, then $x_2=0$, therefore we perform dual simplex method on the tableau below.\n\nThe tableau becomes\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | $x_6$ | $x_7$ | $x_8$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | 0 | 0 | -2 | 0 | -2 | -1/10 | -2 | -11 |\n| $x_1$ | 0 | 1 | 0 | 0 | -1 | 0 | 1/2 | 1/5 | -1 | 3/2 |\n| $x_2$ | 0 | 0 | 1 | 1/2 | 3/2 | 0 | 3/2 | -3/20 | 3/2 | 0 |\n| $x_5$ | 0 | 0 | 0 | -1/2 | 1/2 | 1 | -5/2 | 3/20 | -1 | 4 |\n\n\n\nSo $x_2$ will leave and $x_7$ will enter.\n\nThe tableau becomes\n\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | $x_6$ | $x_7$ | $x_8$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | -2/3 | -1/3 | -3 | 0 | -3 | 0 | -3 | -11 |\n| $x_1$ | 0 | 0 | 1 | 4/3 | 2/3 | 0 | 5/2 | 0 | 1 | 3/2 |\n| $x_7$ | 0 | 0 | -20/3 | -10/3 | -10 | 0 | -10 | 1 | -10 | 0 |\n| $x_5$ | 0 | 0 | 1 | 0 | 2 | 1 | -1 | 0 | 1/2 | 4 |\n\n", "_____no_output_____" ], [ "**Iteration 3:**\n\n* Calculate $B^{-1}d = (-2.5, 10 , 1)^T$, $B^{-1}b=(9,-30,1)$So $S=\\{1\\}$.\n* Calculate the minimal ration $\\theta=18/5$.\n* If $\\theta\\in [3,18/5]$, the current basis $(A_1,A_7,A_5)$ is always optimal. Further, the objective value and right hand side will be\n\n$$\nz(\\theta) = 20 - 3\\theta \\qquad \\bar b = (9-\\frac{5}{2}\\theta, -30 + 10 \\theta, 1+\\theta)^T.\n$$\n\n* When $\\theta =18/5$, then $x_1=0$, therefore we perform dual simplex method on the tableau below.\n\n| | $z$ | $x_1$ | $x_2$ | $x_3$ | $x_4$ | $x_5$ | $x_6$ | $x_7$ | $x_8$ | RHS |\n| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |\n| $z$ | -1 | 0 | -2/3 | -1/3 | -3 | 0 | -3 | 0 | -3 | -46/5 |\n| $x_1$ | 0 | 0 | 1 | 4/3 | 2/3 | 0 | 5/2 | 0 | 1 | 0 |\n| $x_7$ | 0 | 0 | -20/3 | -10/3 | -10 | 0 | -10 | 1 | -10 | 6 |\n| $x_5$ | 0 | 0 | 1 | 0 | 2 | 1 | -1 | 0 | 1/2 | 23/5 |\n\nWe can not pivot anymore. 
Hence the algorithm terminates, which means the problem is infeasible for $\\theta > 18/5$.", "_____no_output_____" ], [ "## 6.72\n\n**a)**\n$$\n\\begin{align}\n& \\max \\quad 6w + \\min_{(x_1,x_2) \\in X} \\{(1-3w)x_1 + (2-w)x_2\\}\\\\\n& s.t \\quad w\\geq 0\n\\end{align}\n$$", "_____no_output_____" ], [ "**b)**\nThe minimal of $\\min_{(x_1,x_2) \\in X} \\{(1-3w)x_1 + (2-w)x_2\\}$ is obtained on one of the following extreme points \n\n$$(0,0), (8,0), (3,5), (0,2).$$\n\nPlug these four points into $f(w)$, we end up with\n\n$$f(w)=6w + \\min\\{0, 4-2w, 13-14w, 8-24w\\}.$$", "_____no_output_____" ], [ "**c)**\n$$\nf(w)=\n\\begin{cases}\n6w, & 0 \\leq w \\leq 1/3 \\\\\n8-18w, & w \\geq 1/3\n\\end{cases}\n$$", "_____no_output_____" ] ], [ [ "import numpy as np\nimport matplotlib.pyplot as plt\nplt.plot(1/3,2,'ro',markersize=10)\nx1 = np.linspace(0,1/3,10)\nx2 = np.linspace(1/3,1,10)\nplt.plot(x1,6*x1,'k-',label=r\"$z=6w$\")\nplt.plot(x2,8 - 18*x2,'k-', label=r\"$z=8-18w$\")\nplt.legend()\nplt.show()", "_____no_output_____" ] ], [ [ "**d)**\n\nThe optimal solution for the Lagrangian dual problem is $w=1/3$.", "_____no_output_____" ], [ "**e)**\n\nSince $w=1/3$, $f(w)=2 + \\min_{(x_1,x_2) \\in X}5/3 x_2=2$, we know $x_2=0$ and therefore $x_1=2$.\nSo the optimal solution for the primal $(x_1, x_2)=(2,0)$.", "_____no_output_____" ], [ "## Exercise 5.14", "_____no_output_____" ], [ "**a)**\n\n$$(c - 10d)' x = (c + 10d)' x \\Rightarrow d'x = 0.$$ \n\nThe same holds for $Ax = b + \\theta f$. Therefore, $5 d'x = 0$. As the optimality and feasibility conditions hold, the same basis remains optimal.", "_____no_output_____" ], [ "**b)**\n\nFor fixed $\\theta$, let $B$ be an arbitrary basis. Then we have $x=(X_B,X_N)=(B^{-1}(b+\\theta f),0)$. Suppose $\\{B^j\\}$ are all possible basis derived from A. Then our problem becomes\n\n$$f(\\theta) = \\underset{j}{\\text{min}} \\{(c+ \\theta d)' {B^j}^{-1}(b + \\theta f)\\}, $$\n where ${B^j}^{-1}(b + \\theta f) \\geq 0$.\n\n\n\nClearly, $f(\\theta)$ is a piecewise quadratic function of $\\theta$ if $f\\neq 0$ \n\n\nLet $K$ be the number of possible bases, then the upper bound on the number of pieces is $2K$.", "_____no_output_____" ], [ "**c)**\n\n\\begin{aligned}\n& \\text{minimize} && \\theta d'x \\\\\n& \\text{subject to} && Ax = \\theta f \\\\\n& && x \\geq 0\n\\end{aligned}\n\n\nLet $B$ be an optimal basis for $\\theta = 1$ and assume that $\\theta > 0$. $d' - d'_B B^{-1} A \\geq 0 \\text{ and } B^{-1}f \\geq 0$. Hence for nonnegative $\\theta$ satisfying $\\theta d' - d'_B B^{-1} A \\geq 0 \\text{ and } \\theta B^{-1}f \\geq 0$ keeps this same basis optimal.", "_____no_output_____" ], [ "**d)**\nConsider $b, f = 0$, $f(\\theta)$ is constant in $\\theta$, hence both convex and concave. ", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ] ]
4a00071d5ef24fa08357f8b25503e02f2d9ea471
10,785
ipynb
Jupyter Notebook
content/courseware/assignment-cesm-control.ipynb
nfeldl/ClimateLaboratoryBook
05eb0395c0e07d3724e6569e160fbefc9829a990
[ "CC-BY-4.0" ]
1
2021-08-25T13:02:15.000Z
2021-08-25T13:02:15.000Z
content/courseware/assignment-cesm-control.ipynb
nfeldl/ClimateLaboratoryBook
05eb0395c0e07d3724e6569e160fbefc9829a990
[ "CC-BY-4.0" ]
null
null
null
content/courseware/assignment-cesm-control.ipynb
nfeldl/ClimateLaboratoryBook
05eb0395c0e07d3724e6569e160fbefc9829a990
[ "CC-BY-4.0" ]
2
2021-07-21T20:43:20.000Z
2021-08-25T13:02:16.000Z
44.20082
385
0.615299
[ [ [ "# Assignment: Global average budgets in the CESM pre-industrial control simulation", "_____no_output_____" ], [ "## Learning goals\n\nStudents completing this assignment will gain the following skills and concepts:\n\n- Continued practice working with the Jupyter notebook\n- Familiarity with atmospheric output from the CESM simulation\n- More complete comparison of the global energy budget in the CESM control simulation to the observations\n- Validation of the annual cycle of surface temperature against observations\n- Opportunity to formulate a hypothesis about these global temperature variations\n- Python programming skills: basic xarray usage: opening gridded dataset and taking averages", "_____no_output_____" ], [ "## Instructions\n\n- In a local copy of this notebook (on the JupyterHub or your own device) **add your answers in additional cells**.\n- **Complete the required problems** below. \n- Remember to set your cell types to `Markdown` for text, and `Code` for Python code!\n- **Include comments** in your code to explain your method as necessary.\n- Remember to actually answer the questions. **Written answers are required** (not just code and figures!)\n- Submit your solutions in **a single Jupyter notebook** that contains your text, your code, and your figures.\n- *Make sure that your notebook* ***runs cleanly without errors:***\n - Save your notebook\n - From the `Kernel` menu, select `Restart & Run All`\n - Did the notebook run from start to finish without error and produce the expected output?\n - If yes, save again and submit your notebook file\n - If no, fix the errors and try again.", "_____no_output_____" ], [ "## Problem 1: The global energy budget in the CESM control simulation\n\nCompute the **global, time average** of each of the following quantities, and **compare them to the observed values** from the Trenberth and Fasullo (2012) figure in the course notes. Recall that when you want to repeat an operation, you should write a function for it!\n\n- Solar Radiation budget:\n - Incoming Solar Radiation, or Insolation\n - Reflected Solar Radiation at the top of atmosphere\n - Solar Radiation Reflected by Surface\n - Solar Radiation Absorbed by Surface\n - Solar Radiation Refelected by Clouds and Atmosphere *(you can calculate this as the difference between the reflected radiation at the top of atmosphere and reflected radiation at the surface)*\n - Total Absorbed Solar Radiation (ASR) at the top of atmosphere\n - Solar Radiation Absorbed by Atmosphere *(you can calculate this as the residual of your budget, i.e. what's left over after accounting for all other absorption and reflection)*\n- Longwave Radiation budget:\n - Outgoing Longwave Radiation\n - Upward emission from the surface\n - Downwelling radiation at the surface\n- Other surface fluxes:\n - \"Thermals\", or *sensible heat flux*. *You will find this in the field called `SHFLX` in your dataset.*\n - \"Evapotranspiration\", or *latent heat flux*. *You will find this in the field called `LHFLX` in your dataset.*\n \n*Note we will look more carefully at atmospheric absorption and emission processes later. 
You do not need to try to calculate terms such as \"Emitted by Atmosphere\" or \"Atmospheric Window\"*\n\n**Based on your results above, answer the following questions:**\n\n- Is the CESM control simulation at (or near) **energy balance**?\n- Do you think this simulation is near equilibrium?\n- Summarize in your own words what you think are the most important similarities and differences of the global energy budgets in the CESM simulation and the observations.", "_____no_output_____" ], [ "## Problem 2: Verifying the annual cycle in global mean surface temperature against observations\n\nIn the class notes we plotted the **timeseries of global mean surface temperature** in the CESM control simulation, and found an **annual cycle**. The purpose of this exercise is to verify that this phenomenon is also found in the observed temperature record. If so, then we can conclude that it is a real feature of Earth's climate and not an artifact of the numerical model.\n\nFor observations, we will use the **NCEP Reanalysis data**.\n\n*Reanalysis data is really a blend of observations and output from numerical weather prediction models. It represents our “best guess” at conditions over the whole globe, including regions where observations are very sparse.*\n\nThe necessary data are all served up over the internet. We will look at monthly climatologies averaged over the 30 year period 1981 - 2010.\n\nYou can browse the available data here: \nhttps://psl.noaa.gov/thredds/catalog/Datasets/ncep.reanalysis.derived/catalog.html\n\n**Surface air temperature** is contained in a file called `air.2m.mon.ltm.nc`, which is found in the collection called `surface_gauss`. \n\nHere's a link directly to the catalog page for this data file:\nhttps://psl.noaa.gov/thredds/catalog/Datasets/ncep.reanalysis.derived/surface_gauss/catalog.html?dataset=Datasets/ncep.reanalysis.derived/surface_gauss/air.2m.mon.ltm.nc\n\nNow click on the `OPeNDAP` link. A page opens up with lots of information about the contents of the file. The `Data URL` is what we need to read the data into our Python session. For example, this code opens the file and displays a list of the variables it contains:", "_____no_output_____" ] ], [ [ "import xarray as xr\nurl = 'https://psl.noaa.gov/thredds/dodsC/Datasets/ncep.reanalysis.derived/surface_gauss/air.2m.mon.ltm.nc'\nncep_air2m = xr.open_dataset(url, decode_times=False)\n\nprint(ncep_air2m)", "<xarray.Dataset>\nDimensions: (lat: 94, lon: 192, nbnds: 2, time: 12)\nCoordinates:\n * lon (lon) float32 0.0 1.875 3.75 ... 354.375 356.25 358.125\n * time (time) float64 -6.571e+05 -6.57e+05 ... -6.567e+05\n * lat (lat) float32 88.542 86.6531 ... -86.6531 -88.542\nDimensions without coordinates: nbnds\nData variables:\n climatology_bounds (time, nbnds) float64 ...\n air (time, lat, lon) float32 ...\n valid_yr_count (time, lat, lon) float32 ...\nAttributes:\n Conventions: COARDS\n title: mean daily NMC reanalysis (1958)\n description: Data is from NMC initialized reanalysis\\n...\n platform: Model\n not_missing_threshold_percent: minimum 3% values input to have non-missi...\n history: Created 2011/07/12 by doMonthLTM\\nConvert...\n dataset_title: NCEP-NCAR Reanalysis 1\n References: http://www.psl.noaa.gov/data/gridded/data...\n" ] ], [ [ "The temperature data is called `air`. Take a look at the details:", "_____no_output_____" ] ], [ [ "print(ncep_air2m.air)", "<xarray.DataArray 'air' (time: 12, lat: 94, lon: 192)>\n[216576 values with dtype=float32]\nCoordinates:\n * lon (lon) float32 0.0 1.875 3.75 5.625 ... 
352.5 354.375 356.25 358.125\n * time (time) float64 -6.571e+05 -6.57e+05 ... -6.568e+05 -6.567e+05\n * lat (lat) float32 88.542 86.6531 84.7532 ... -84.7532 -86.6531 -88.542\nAttributes:\n long_name: Monthly Long Term Mean of Air Temperature\n valid_range: [150. 400.]\n units: degK\n precision: 2\n GRIB_id: 11\n GRIB_name: TMP\n var_desc: Air temperature\n level_desc: 2 m\n statistic: Long Term Mean\n parent_stat: Mean\n actual_range: [198.33992 311.8952 ]\n dataset: NCEP Reanalysis Derived Products\n _ChunkSizes: [ 1 94 192]\n" ] ], [ [ "Notice that the dimensions are `(time: 12, lat: 94, lon: 192)`. The time dimension is calendar months. But note that the lat/lon grid is not the same as our model output! \n\n*Think about how you will handle calculating the global average of these data.*", "_____no_output_____" ], [ "### Your task:\n\n- Make a well-labeled timeseries graph of the global-averaged observed average surface air temperature climatology. \n- Verify that the annual cycle we found in the CESM simulation also exists in the observations.\n- In your own words, suggest a plausible physical explanation for why this annual cycle exists. ", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ] ]
4a001db364ab8f99fc126beb5a96c962dc18d547
6,155
ipynb
Jupyter Notebook
Figure_4/Figure_4.ipynb
StanBarmentloo/J1407_transit_search_activity
898ca259eb186be55232bd39485598791cd197f3
[ "BSD-2-Clause" ]
null
null
null
Figure_4/Figure_4.ipynb
StanBarmentloo/J1407_transit_search_activity
898ca259eb186be55232bd39485598791cd197f3
[ "BSD-2-Clause" ]
null
null
null
Figure_4/Figure_4.ipynb
StanBarmentloo/J1407_transit_search_activity
898ca259eb186be55232bd39485598791cd197f3
[ "BSD-2-Clause" ]
null
null
null
33.091398
114
0.584565
[ [ [ "import numpy as np\nimport matplotlib.pyplot as plt\nimport butcher\nimport bro\nimport os\n\nfrom astropy.io import ascii\nfrom astropy.timeseries import LombScargle", "_____no_output_____" ], [ "#Reading in the data\n\n#Get the data directory\ncwd = os.getcwd()\ndata_dir = cwd.replace('Figure_4', 'Data\\\\')\n\n#ASAS data\norgasas_data = ascii.read(data_dir + 'asas.csv')\nasas_mask = (orgasas_data['emag'] < 0.05)\nasas_data = orgasas_data[asas_mask]\n\nasas_flux = butcher.mag_to_flux(asas_data['mag'])\nasas_eflux = butcher.emag_to_eflux(asas_data['mag'], asas_data['emag'])\n\n#ASASSN data\norgasassn_data = ascii.read(data_dir + 'asassn.csv')\nasassn_mask = (orgasassn_data['emag'] < 0.05)\nasassn_data = orgasassn_data[asassn_mask]\n\nasassn_flux = butcher.mag_to_flux(asassn_data['mag'])\nasassn_eflux = butcher.emag_to_eflux(asassn_data['mag'], asassn_data['emag'])\n\n#KELT data\norgkelt_data = ascii.read(data_dir + 'kelt.csv')\nkelt_mask = (orgkelt_data['emag'] < 0.05)\nkelt_data = orgkelt_data[kelt_mask]\n\nkelt_flux = butcher.mag_to_flux(kelt_data['mag'])\nkelt_eflux = butcher.emag_to_eflux(kelt_data['mag'], kelt_data['emag'])\n\n#PROMPT data\norgprompt_data = ascii.read(data_dir + 'prompt.csv') #time is JD-2450000\nprompt_mask = (orgprompt_data['emag'] < 0.05)\nprompt_data = orgprompt_data[prompt_mask]\n\nprompt_flux = butcher.mag_to_flux(prompt_data['mag'])\nprompt_eflux = butcher.emag_to_eflux(prompt_data['mag'], prompt_data['emag'])\n\n#ROAD data\norgroad_data = ascii.read(data_dir + 'road.csv') #time is JD-2450000\nroad_mask = (orgroad_data['emag'] < 0.05)\nroad_data = orgroad_data[road_mask]\n\nroad_flux = butcher.mag_to_flux(road_data['mag'])\nroad_eflux = butcher.emag_to_eflux(road_data['mag'], road_data['emag'])", "_____no_output_____" ], [ "#Correct for the long term flux decrease mentioned in section 3.1\n\nasas_flux = butcher.long_correct(asas_data['MJD'], asas_flux, asas_eflux)\nasassn_flux = butcher.long_correct(asassn_data['MJD'], asassn_flux, asassn_eflux)\nkelt_flux = butcher.long_correct(kelt_data['HJD'], kelt_flux, kelt_eflux)\nprompt_flux = butcher.long_correct(prompt_data['HJD'], prompt_flux, prompt_eflux)\nroad_flux = butcher.long_correct(road_data['HJD'], road_flux, road_eflux)", "_____no_output_____" ], [ "#Store the individual telescopes in lists\n\ntimes = [asas_data['MJD'], asassn_data['MJD'], kelt_data['HJD'], prompt_data['HJD'], road_data['HJD']]\nfluxes = [asas_flux, asassn_flux, kelt_flux, prompt_flux, road_flux]\nuncertainties = [asas_eflux, asassn_eflux, kelt_eflux, prompt_eflux, road_eflux]\nnames = ['ASAS', 'ASAS-SN', 'KELT', 'PROMPT', 'ROAD']", "_____no_output_____" ], [ "#Remove the periodicities\n\nnames = ['ASAS', 'ASASSN', 'KELT', 'PROMPT', 'ROAD']\norg_powers, powers2 = [], []\n\nfor j in range(5):\n time, flux, eflux = times[j], fluxes[j], uncertainties[j]\n\n corrflux1, periods1, freq1, power1 = bro.short_correct(time, flux, eflux, min_chunk_size = 10)\n\n #Get the uncorrected lombscargle\n frequencies = 1/np.linspace(2, 10, 3000)\n org_power = LombScargle(time, flux-np.mean(flux), dy = eflux).power(frequencies)\n \n #Get the doubly bro corrected lombscargle\n corrflux2, periods2, freq2, power2 = bro.short_correct(time, corrflux1, eflux, min_chunk_size = 10)\n frequencies = 1/np.linspace(2, 10, 3000)\n power2 = LombScargle(time, corrflux2-np.mean(corrflux2), dy = eflux).power(frequencies)\n \n org_powers.append(org_power)\n powers2.append(power2)", "_____no_output_____" ], [ "#Create the figure \n\nimport 
matplotlib\nplt.style.use('seaborn-dark-palette')\nfont = {'family' : 'normal',\n 'weight' : 'normal',\n 'size' : 20}\nmatplotlib.rc('font', **font)\n\nfig, ax = plt.subplots(5)\nfor i in range(5):\n ax[i].plot(1/frequencies, org_powers[i], alpha = 0.7, c='grey', label = 'Pre-Correction ' + names[i])\n ax[i].plot(1/frequencies, powers2[i], alpha = 0.7,label = 'Post-Correction ' + names[i])\n ax[i].legend(fontsize = 18)\n\nfig.text(0.35, 0.09, 'Signal Period (Days)', fontsize = 24)\nfig.text(0.02, 0.35, 'Signal Power (Arbitrary Units)', rotation = 90, fontsize = 24)\nfig = plt.gcf()\nfig.set_size_inches(12,20)\n#plt.savefig('Removing_Dominant_Cycle.pdf')\nplt.show()", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code" ] ]
4a00213e0f2494eb5bc41df720e202b0c69d1b29
19,169
ipynb
Jupyter Notebook
Sentiment-Analysis.ipynb
Bharat-Reddy/Sentiment-Analysis-of-IMDB-Film-Reviews
af9227078a0739319f949e87e48725296fe683cb
[ "MIT" ]
null
null
null
Sentiment-Analysis.ipynb
Bharat-Reddy/Sentiment-Analysis-of-IMDB-Film-Reviews
af9227078a0739319f949e87e48725296fe683cb
[ "MIT" ]
null
null
null
Sentiment-Analysis.ipynb
Bharat-Reddy/Sentiment-Analysis-of-IMDB-Film-Reviews
af9227078a0739319f949e87e48725296fe683cb
[ "MIT" ]
null
null
null
29.81182
1,030
0.594189
[ [ [ "# IMDb Movie Reviews Classifier", "_____no_output_____" ] ], [ [ "from sklearn.feature_extraction.text import CountVectorizer\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.metrics import accuracy_score\nfrom sklearn.model_selection import train_test_split\nfrom nltk.corpus import stopwords\nfrom nltk.stem.porter import PorterStemmer\nfrom nltk.stem import WordNetLemmatizer\nfrom sklearn.svm import LinearSVC\nfrom sklearn.feature_extraction.text import TfidfVectorizer\nimport re", "_____no_output_____" ], [ "reviews_train = []\nfor line in open(r'.\\movie_data\\full_train.txt', 'r', encoding=\"utf8\"):\n reviews_train.append(line.strip())\n \nreviews_test = []\nfor line in open(r'.\\movie_data\\full_test.txt', 'r', encoding=\"utf8\"):\n reviews_test.append(line.strip())", "_____no_output_____" ] ], [ [ "## Using 50% for training and remaining 50% for testing", "_____no_output_____" ] ], [ [ "len(reviews_train), len(reviews_test)", "_____no_output_____" ], [ "reviews_train[5]", "_____no_output_____" ] ], [ [ "### We can see that the data is very messy, So let's do some cleaning and pre-processing", "_____no_output_____" ], [ "## Removing punctuation and HTML tags and making everything lower-case", "_____no_output_____" ] ], [ [ "REPLACE_NO_SPACE = re.compile(\"[.;:!\\'?,\\\"()\\[\\]]\")\nREPLACE_WITH_SPACE = re.compile(\"(<br\\s*/><br\\s*/>)|(\\-)|(\\/)\")\n\ndef preprocess_reviews(reviews):\n reviews = [REPLACE_NO_SPACE.sub(\"\", line.lower()) for line in reviews]\n reviews = [REPLACE_WITH_SPACE.sub(\" \", line) for line in reviews]\n \n return reviews\n\nreviews_train_clean = preprocess_reviews(reviews_train)\nreviews_test_clean = preprocess_reviews(reviews_test)", "_____no_output_____" ], [ "reviews_train_clean[5]", "_____no_output_____" ] ], [ [ "## Removing stop words", "_____no_output_____" ] ], [ [ "english_stop_words = stopwords.words('english')\ndef remove_stop_words(corpus):\n removed_stop_words = []\n for review in corpus:\n removed_stop_words.append(\n ' '.join([word for word in review.split() \n if word not in english_stop_words])\n )\n return removed_stop_words\n\nno_stop_words = remove_stop_words(reviews_train_clean)", "_____no_output_____" ], [ "no_stop_words[5]", "_____no_output_____" ] ], [ [ "# Normalizing Text", "_____no_output_____" ], [ "## 1. Stemming", "_____no_output_____" ] ], [ [ "def get_stemmed_text(corpus):\n stemmer = PorterStemmer()\n return [' '.join([stemmer.stem(word) for word in review.split()]) for review in corpus]\n\nstemmed_reviews = get_stemmed_text(reviews_train_clean)", "_____no_output_____" ], [ "stemmed_reviews[5]", "_____no_output_____" ] ], [ [ "## 2. 
Lemmatization", "_____no_output_____" ] ], [ [ "def get_lemmatized_text(corpus):\n lemmatizer = WordNetLemmatizer()\n return [' '.join([lemmatizer.lemmatize(word) for word in review.split()]) for review in corpus]\n\nlemmatized_reviews = get_lemmatized_text(reviews_train_clean)", "_____no_output_____" ], [ "lemmatized_reviews[5]", "_____no_output_____" ] ], [ [ "## Vectorization using binary represenation", "_____no_output_____" ] ], [ [ "ngram_vectorizer = CountVectorizer(binary=True, ngram_range=(1,1))\nngram_vectorizer.fit(reviews_train_clean)\nX = ngram_vectorizer.transform(reviews_train_clean)\nX_test = ngram_vectorizer.transform(reviews_test_clean)\n\nX_train, X_val, y_train, y_val = train_test_split(\n X, target, train_size = 0.75\n)", "_____no_output_____" ] ], [ [ "## Classification using Logistic Regression", "_____no_output_____" ] ], [ [ "final_ngram = LogisticRegression(C=0.5)\nfinal_ngram.fit(X, target)\nprint (\"Final Accuracy: %s\" \n % accuracy_score(target, final_ngram.predict(X_test)))", "Final Accuracy: 0.8736\n" ] ], [ [ "## Classification using SVM Classifier", "_____no_output_____" ] ], [ [ "final_svm_ngram = LinearSVC(C=0.01)\nfinal_svm_ngram.fit(X, target)\nprint (\"Final Accuracy: %s\" \n % accuracy_score(target, final_svm_ngram.predict(X_test)))", "Final Accuracy: 0.87856\n" ] ], [ [ "## Vectorization using Word Counts", "_____no_output_____" ] ], [ [ "ngram_vectorizer = CountVectorizer(binary=False, ngram_range=(1,1))\nngram_vectorizer.fit(reviews_train_clean)\nX = ngram_vectorizer.transform(reviews_train_clean)\nX_test = ngram_vectorizer.transform(reviews_test_clean)\n\nX_train, X_val, y_train, y_val = train_test_split(\n X, target, train_size = 0.75\n)", "C:\\Users\\bhara\\Anaconda3\\lib\\site-packages\\sklearn\\model_selection\\_split.py:2179: FutureWarning: From version 0.21, test_size will always complement train_size unless both are specified.\n FutureWarning)\n" ] ], [ [ "## Classification using Logistic Regression", "_____no_output_____" ] ], [ [ "final_wc = LogisticRegression(C=0.01)\nfinal_wc.fit(X, target)\nprint (\"Final Accuracy: %s\" \n % accuracy_score(target, final_wc.predict(X_test)))", "C:\\Users\\bhara\\Anaconda3\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. 
Specify a solver to silence this warning.\n FutureWarning)\n" ] ], [ [ "## Classification using SVM Classifier", "_____no_output_____" ] ], [ [ "final_svm_ngram = LinearSVC(C=0.01)\nfinal_svm_ngram.fit(X, target)\nprint (\"Final Accuracy: %s\" \n % accuracy_score(target, final_svm_ngram.predict(X_test)))", "Final Accuracy: 0.87808\n" ] ], [ [ "## Vectorization using TF-IDF", "_____no_output_____" ] ], [ [ "tfidf_vectorizer = TfidfVectorizer(ngram_range=(1,1))\ntfidf_vectorizer.fit(reviews_train_clean)\nX = tfidf_vectorizer.transform(reviews_train_clean)\nX_test = tfidf_vectorizer.transform(reviews_test_clean)\n\nX_train, X_val, y_train, y_val = train_test_split(\n X, target, train_size = 0.75\n)", "C:\\Users\\bhara\\Anaconda3\\lib\\site-packages\\sklearn\\model_selection\\_split.py:2179: FutureWarning: From version 0.21, test_size will always complement train_size unless both are specified.\n FutureWarning)\n" ] ], [ [ "## Classification using Logistic Regression", "_____no_output_____" ] ], [ [ "final_wc = LogisticRegression(C=1)\nfinal_wc.fit(X, target)\nprint (\"Final Accuracy: %s\" \n % accuracy_score(target, final_wc.predict(X_test)))", "Final Accuracy: 0.8824\n" ] ], [ [ "## Classification using SVM", "_____no_output_____" ] ], [ [ "final_svm_ngram = LinearSVC(C=0.05)\nfinal_svm_ngram.fit(X, target)\nprint (\"Final Accuracy: %s\" \n % accuracy_score(target, final_svm_ngram.predict(X_test)))", "Final Accuracy: 0.8792\n" ] ], [ [ "# Final Model", "_____no_output_____" ] ], [ [ "stop_words = ['in', 'of', 'at', 'a', 'the']\nngram_vectorizer = CountVectorizer(binary=True, ngram_range=(1, 3), stop_words=stop_words)\nngram_vectorizer.fit(reviews_train_clean)\nX = ngram_vectorizer.transform(reviews_train_clean)\nX_test = ngram_vectorizer.transform(reviews_test_clean)\n\nX_train, X_val, y_train, y_val = train_test_split(\n X, target, train_size = 0.75\n)\n \nfinal = LinearSVC(C=0.01)\nfinal.fit(X, target)\nprint (\"Final Accuracy: %s\" \n % accuracy_score(target, final.predict(X_test)))", "Final Accuracy: 0.90024\n" ] ], [ [ "# Finally broke the 90% mark!!", "_____no_output_____" ] ], [ [ "feature_to_coef = {\n word: coef for word, coef in zip(\n cv.get_feature_names(), final_model.coef_[0]\n )\n}\nfor best_positive in sorted(\n feature_to_coef.items(), \n key=lambda x: x[1], \n reverse=True)[:5]:\n print (best_positive)\nprint('-------------------------------------')\nfor best_negative in sorted(\n feature_to_coef.items(), \n key=lambda x: x[1])[:5]:\n print (best_negative)", "('excellent', 0.9292548584422305)\n('perfect', 0.7907005520268843)\n('great', 0.6745323708155975)\n('amazing', 0.6127039374291681)\n('superb', 0.6019367696526459)\n-------------------------------------\n('worst', -1.3645957254906054)\n('waste', -1.1664240886149932)\n('awful', -1.0324187671234308)\n('poorly', -0.8752018284709887)\n('boring', -0.8563543047847684)\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
4a002e6524dd8244b245c6fe0755c1431a82807c
284,160
ipynb
Jupyter Notebook
lectures/timeseries/timeseries-1.ipynb
malvin0704/PIC16B
a88b9e1264dc2f842ef22a7742a2e43c16383c8f
[ "MIT" ]
null
null
null
lectures/timeseries/timeseries-1.ipynb
malvin0704/PIC16B
a88b9e1264dc2f842ef22a7742a2e43c16383c8f
[ "MIT" ]
null
null
null
lectures/timeseries/timeseries-1.ipynb
malvin0704/PIC16B
a88b9e1264dc2f842ef22a7742a2e43c16383c8f
[ "MIT" ]
null
null
null
354.756554
61,248
0.926626
[ [ [ "# Time Series Modeling\n\nIn this lecture, we'll do some **basic** work with time series modeling. Time series are surprisingly complicated objects to work with and model, and many people spend their careers considering statistical questions related to effective modeling of timeseries. In this set of lecture notes, we won't be able to go into too much detail, but we will highlight some of the key questions and approaches to addressing them. \n\n## Note\n\n*I had originally intended to approach time series modeling from a deep learning perspective, using TensorFlow. This is possible; see [here](https://www.tensorflow.org/tutorials/structured_data/time_series) for an example. The general idea is actually pretty similar to what we used for text generation. However, a quick check indicated that contemporary best practice is still to use models developed in econometrics and statistics, as these tend to be more accurate and more interpretable.*\n\n*Parts of these lecture notes are based on [this tutorial](https://towardsdatascience.com/an-end-to-end-project-on-time-series-analysis-and-forecasting-with-python-4835e6bf050b). For an overview of the functionality available in the statsmodels package for timeseries, take a look [here](https://www.statsmodels.org/stable/tsa.html). Here is a [nice overview](https://people.duke.edu/~rnau/411arim.htm) of basic ARIMA models, which can help give some interpretation for the meaning of the `order` parameter that we use below.* ", "_____no_output_____" ] ], [ [ "import sqlite3\nimport pandas as pd\nimport numpy as np\nfrom matplotlib import pyplot as plt\nplt.style.use('seaborn-whitegrid')\nimport statsmodels.api as sm", "_____no_output_____" ] ], [ [ "## Data: NOAA Climate\n\nFor this lecture, we're actually going to go back to the NOAA climate data that we used early in the quarter. Using the database that we constructed in Week 2, I'm going to grab data for Amundsen-Scott weather station, which is in the deep Antarctic. ", "_____no_output_____" ] ], [ [ "with sqlite3.connect(\"../sql/temps.db\") as conn:\n\n cmd = \\\n \"\"\"\n SELECT S.name, T.year, T.month, T.temp\n FROM temperatures T\n LEFT JOIN stations S ON T.id = S.id\n WHERE S.NAME == \"AMUNDSEN_SCOTT\" AND T.year > 2000\n \"\"\"\n\n df = pd.read_sql_query(cmd, conn)", "_____no_output_____" ] ], [ [ "## Quick Data Prep\n\nThere's a bit of data preparation needed before we can do formal time series modeling. In particular, we need to make a **Date** column, and set it as the index for the timeseries that we care about. ", "_____no_output_____" ] ], [ [ "df[\"Date\"] = df[\"Year\"].astype(str) + \"-\" + df[\"Month\"].astype(str)\ndf[\"Date\"] = pd.to_datetime(df[\"Date\"])\ndf.head()", "_____no_output_____" ] ], [ [ "The next thing we need to do is set the Date as the index for our dataframe. ", "_____no_output_____" ] ], [ [ "df.index = pd.DatetimeIndex(df[\"Date\"], freq = \"MS\")", "_____no_output_____" ] ], [ [ "Finally, we are going to want to make predictions and test them, which means that we still perform a train/test split. I'm going to take the most recent 4 years as test data. ", "_____no_output_____" ] ], [ [ "recency = -48\ny_train = df.iloc[:recency][\"Temp\"]\ny_test = df.iloc[recency:][\"Temp\"]\ny_train.shape, y_test.shape", "_____no_output_____" ] ], [ [ "Finally, let's take a look at our training data. 
", "_____no_output_____" ] ], [ [ "m = y_train.mean()\ntime = y_train.index\nfig, ax = plt.subplots(1, figsize = (6, 3))\nax.plot(time, np.ones(len(time))*m, label = \"reference\", color = \"black\")\nax.plot(y_train, label = \"temp\")", "_____no_output_____" ] ], [ [ "Notice that there is considerable seasonal variation, on the order of 30 degrees Celsius, within each year. This can make it difficult to see trends. For example, would you say that the overall trend in this image is upward, downard, or neutral? It's very difficult to say! Let's now introduce an exploratory tool that can help us think about this kind of question. ", "_____no_output_____" ], [ "## Time series Decomposition\n\nTime series decomposition is technique for exploratory data analysis that allows you to separate a time series into separate components, like this: \n\n$$\\text{data} = \\text{trend} + \\text{seasonal} + \\text{noise}$$\n\nTechnically speaking, the above corresponds to an *additive* model. We can also use a multiplicative model: \n\n$$\\text{data} = \\text{trend} \\times \\text{seasonal} \\times \\text{noise}$$\n\nThe choice of which model to use for decomposition can be a tricky one, but additive models are usually a sound place to start. ", "_____no_output_____" ] ], [ [ "# specifying period not necessary because we have the frequency defined\n# so this would also work: \n# decomposition = sm.tsa.seasonal_decompose(y, model='additive') \ndecomposition = sm.tsa.seasonal_decompose(y_train, model='additive', period = 12, ) ", "_____no_output_____" ] ], [ [ "The object returned by the decomposition has a convenient `plot()` method. ", "_____no_output_____" ] ], [ [ "fig = decomposition.plot()", "_____no_output_____" ] ], [ [ "Visually, you can \"add up\" the bottom three rows to obtain the top three row. In a bit more detail: \n\n1. The `trend` component is the model's best estimate of the overall direction of the data up or down. \n2. The `seasonal` component is the model's best estimate of seasonal variation. It's constrained to be periodic (in this case, with period 12). \n3. The `resid`ual is whatever part of the data is left over. Large residuals, or residuals with nonstationary distributions (distributions that change over time) suggest that the model we used for the decomposition was not very good. \n\nThere are many choices of model that can be used for timeseries decomposition. The default in the `statsmodels` package is based on moving averages and is relatively naive. Much more sophisticated models are typically used in practice. \n\nNow that we've performed our decomposition, we're equipped to re-examine our question from earlier about the trend in temperatures at Amundsen-Scott station. ", "_____no_output_____" ] ], [ [ "trend = decomposition.trend\ntime = trend.index\nm = decomposition.trend.mean()\nfig, ax = plt.subplots(1, figsize = (6, 3))\n# ax.plot(time, np.ones(len(time))*m, label = \"reference\", color = \"grey\")\nax.plot(trend, label = \"trend\")", "_____no_output_____" ] ], [ [ "The trendline still displays considerable fluctuation. It looks like there may be some upward trend, but more data or a more sophisticated decomposition method would be required in order to say anything conclusive here. ", "_____no_output_____" ], [ "## Time Series Forecasting\n\nDecomposition is a useful tool that can help us spot trends in data. However, we often want to do better than trendspotting. For this, we should select and fit statistical models. 
The problem of choosing exactly which model to use is quite subtle, and the statistical theory of this problem can occupy entire courses and even research careers. If you're interested in learning the theory of time series analysis, Statistics 170 at UCLA appears to be the way to go. \n\nWhen we take a machine-learning approach to this problem, we can, to an extent, circumvent the theoretical questions by evaluating models on validation data. That's what we'll do today. However, this approach can really only go so far -- sound footing in both the theory of time series and the domain you're studying are necessary for best results here. \n\nLet's do an example using a SARIMAX model, which stands for \"Seasonal AutoRegressive Integrated Moving Average with eXogenous regressors.\" These are fairly general and flexible models for seasonal data. When fitting models such as these, it's necessary to specify one or more `order` parameters used to determine the structure of the model. ", "_____no_output_____" ] ], [ [ "order = (0, 1, 0)", "_____no_output_____" ] ], [ [ "This specification means that we are going to use a first-order auto-regressive model with 0th-order differences and a 0th order moving average. \n\nThe SARIMAX model also uses a separate *seasonal* mini-model, which requires its own parameters. The `12` here refers to the 12 months of the year. ", "_____no_output_____" ] ], [ [ "seasonal_order = (0, 1, 1, 12)", "_____no_output_____" ], [ "model = sm.tsa.SARIMAX(y_train, order = order, seasonal_order = seasonal_order)", "_____no_output_____" ], [ "fit = model.fit()", "_____no_output_____" ], [ "print(fit.summary())", " SARIMAX Results \n============================================================================================\nDep. Variable: Temp No. Observations: 193\nModel: SARIMAX(0, 1, 0)x(0, 1, [1], 12) Log Likelihood -498.915\nDate: Fri, 14 May 2021 AIC 1001.830\nTime: 09:20:13 BIC 1008.216\nSample: 01-01-2001 HQIC 1004.419\n - 01-01-2017 \nCovariance Type: opg \n==============================================================================\n coef std err z P>|z| [0.025 0.975]\n------------------------------------------------------------------------------\nma.S.L12 -0.9931 1.318 -0.754 0.451 -3.576 1.590\nsigma2 12.5231 15.923 0.786 0.432 -18.686 43.732\n===================================================================================\nLjung-Box (L1) (Q): 33.00 Jarque-Bera (JB): 5.09\nProb(Q): 0.00 Prob(JB): 0.08\nHeteroskedasticity (H): 0.83 Skew: 0.41\nProb(H) (two-sided): 0.46 Kurtosis: 3.14\n===================================================================================\n\nWarnings:\n[1] Covariance matrix calculated using the outer product of gradients (complex-step).\n" ] ], [ [ "## Forecasting\n\nBy default, the `get_prediction()` method of the `fit` object will produce the model's \"prediction\" on training data. ", "_____no_output_____" ] ], [ [ "train_preds = fit.get_prediction().predicted_mean\n\nrecency = -48\n\nplt.plot(y_train[recency:], color = \"grey\", label = \"data\")\nplt.plot(train_preds[recency:], zorder = 10, label = \"modeled\")\nplt.legend()", "_____no_output_____" ] ], [ [ "To get the predictions on test data, we can pass explicit `start` and `end` parameters. 
", "_____no_output_____" ] ], [ [ "test_preds = fit.get_prediction(start = y_test.index.values[0], \n end = y_test.index.values[-1])\n\ntest_preds = test_preds.predicted_mean\n\nrecency = -48\n\nplt.plot(y_train[recency:], color = \"grey\", label = \"data\")\nplt.scatter(y_test.index.values, y_test, color = \"red\", label = \"test data\", s = 5)\nplt.plot(test_preds, zorder = 10, label = \"modeled\")\n# plt.legend()", "_____no_output_____" ] ], [ [ "Looks pretty reasonable overall! \n\nOne important item here is missing: an expression of our uncertainty. Because these are statistical models, they have error bars. Communicating the error bars appropriately is a fundamental part of responsible forecasting. Here's a way to plot them. ", "_____no_output_____" ] ], [ [ "test_preds = fit.get_prediction(start = y_test.index.values[0], \n end = y_test.index.values[-1])\n\ntest_ci = test_preds.conf_int()\n\ntest_preds = test_preds.predicted_mean\n\nrecency = -48\n\nplt.plot(y_train[recency:], color = \"grey\", label = \"data\")\n\nplt.fill_between(test_ci.index, test_ci.iloc[:,0], test_ci.iloc[:,1], color = \"gray\", alpha = 0.3)\n\nplt.scatter(y_test.index.values, y_test, color = \"red\", label = \"test data\", s = 5)\nplt.plot(test_preds, zorder = 10, label = \"modeled\")\n# plt.legend()", "_____no_output_____" ] ], [ [ "As we'd expect, although the model doesn't perfectly fit every individual piece of data, the data falls within the error bars the vast majority of the time. \n\nFinally, we can also use our model for long-term predictions: ", "_____no_output_____" ] ], [ [ "test_preds = fit.get_prediction(start = y_test.index.values[0], \n end = pd.to_datetime('2030-01-01'))\n\ntest_ci = test_preds.conf_int()\n\ntest_preds = test_preds.predicted_mean\n\nrecency = -48\n\nplt.plot(y_train[recency:], color = \"grey\", label = \"data\")\n\nplt.scatter(y_test.index.values, y_test, color = \"red\", label = \"test data\", s = 5)\nplt.plot(test_preds, zorder = 10, label = \"modeled\")", "_____no_output_____" ] ], [ [ "This model appears to predict a considerable increase in temperature, on the order of 1-2 ℃ over the next 10 years. ", "_____no_output_____" ], [ "## Model Selection\n\nIn the last example, we used the `order` and `seasonal_order` parameters above to specify the model structure. We didn't really talk about what these parameters mean, and going into detail would take us well beyond the scope of this course. However, we do have to face the problem of how to *choose* these parameters. For this, a convenient approach is to define a predictive loss function and choose the combination of parameters that minimizes it. This is not the statistically principled way to do things -- it's very much in the machine learning spirit. \n\nHere's an example in which we'll compare multiple possibilities for the `order` parameter by searching across all combinations within a specified range. In machine learning, this strategy is called \"*grid search*.\" It's really only practical when we have a small number of combinations and when our model is fairly quick to train. \n\nThere are multiple choices for the loss function. A common one to use is the AIC, which is a measure that trades off model complexity and accuracy. ", "_____no_output_____" ] ], [ [ "fit.aic", "_____no_output_____" ] ], [ [ "The AIC is an ok thing to compute when we don't have access to test data, but...we do! So, instead of computing the AIC, we'll instead compute the mean-square prediction error on the test set. 
", "_____no_output_____" ] ], [ [ "def test_MSE(fit, y_test):\n \n test_preds = fit.get_prediction(start = y_test.index.values[0], \n end = y_test.index.values[-1])\n test_preds = test_preds.predicted_mean\n \n return ((y_test - test_preds)**2).mean()", "_____no_output_____" ], [ "from itertools import product\na = range(0, 2)\n\nbest_order = (0, 0, 0)\nbest_MSE = np.inf\nfor order in product(a, a, a):\n model = sm.tsa.SARIMAX(y_train, order = order, seasonal_order = seasonal_order)\n fit = model.fit()\n MSE = test_MSE(fit, y_test)\n if MSE < best_MSE:\n print(\"Found MSE \" + str(round(MSE, 2)) + \" with order \" + str(order))\n best_MSE = MSE\n best_order = order\n best_model = model ", "Found MSE 6.71 with order (0, 0, 0)\nFound MSE 6.71 with order (0, 0, 1)\nFound MSE 6.01 with order (0, 1, 0)\n" ] ], [ [ "By sheer coincidence, the best order is the same one that I used to construct the original model above. One could also use this strategy to choose the `seasonal_order` parameters. ", "_____no_output_____" ], [ "## Reminder\n\nTime series forecasting is a challenging art that requires both statistical know-how and knowledge of the data source in order to do responsibly. The approach here, using validation on unseen test data to perform model selection, is a reasonable way to get started. If you are seriously interested in time series forecasting, however, there's no substitute for a course (like Stat 170) and lots of practice. ", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ] ]
4a0057d1fb8b07ea0fa07d0d07f8514781e1b164
868,347
ipynb
Jupyter Notebook
Experiments/.ipynb_checkpoints/analysis - Copy-checkpoint.ipynb
leven87/Unlock-mobile-screen-using-handwritten-graphics
17a0c593ea2d63c28cb41e9329ad497bf0504391
[ "Apache-2.0" ]
null
null
null
Experiments/.ipynb_checkpoints/analysis - Copy-checkpoint.ipynb
leven87/Unlock-mobile-screen-using-handwritten-graphics
17a0c593ea2d63c28cb41e9329ad497bf0504391
[ "Apache-2.0" ]
null
null
null
Experiments/.ipynb_checkpoints/analysis - Copy-checkpoint.ipynb
leven87/Unlock-mobile-screen-using-handwritten-graphics
17a0c593ea2d63c28cb41e9329ad497bf0504391
[ "Apache-2.0" ]
null
null
null
487.287879
149,104
0.933355
[ [ [ "import numpy as np\nimport pandas as pd\nimport math\nfrom math import sin, cos, radians\n\nimport os\nimport matplotlib.pyplot as plt\nimport datetime\n\nimport scipy.stats as st\nimport scipy.signal as sgl\npd.set_option('display.max_columns', 500)\n\n#import fastdtw\nfrom scipy.spatial.distance import euclidean\nfrom fastdtw import fastdtw\n\nimport scipy.interpolate as spi\n\n#hide warning\nimport warnings\nwarnings.filterwarnings('ignore')", "_____no_output_____" ], [ "# file_name='./signature_data_preprocessed/U'+str(i)+'S'+str(j)+'.txt'\nprefix = \"./dataset/test/\"\nfile_name = prefix + '202084182540.sig'\n# file_name = prefix + '002_1_1.sig'\nfile=pd.read_csv(file_name,delimiter=' ', names=['X','Y','TStamp','Pres','EndPts'], header=None, skiprows=2)\nfile_size=len(file)\n\n# file2_name = 'signaturecontent_08191708.sig'\nfile2_name = prefix + 'verify_2020821123742.sig'\nfile2=pd.read_csv(file2_name,delimiter=' ', names=['X','Y','TStamp','Pres','EndPts'], header=None, skiprows=2)\nfile2_size=len(file2)\n\n# file3_name = prefix + '2020720125531.sig'\nfile3_name = prefix + '202084182532.sig'\nfile3=pd.read_csv(file3_name,delimiter=' ', names=['X','Y','TStamp','Pres','EndPts'], header=None, skiprows=2)\nfile3_size=len(file3)\n\nfile_fake_name = prefix + 'verify_2020821123742.sig'\nfile_fake=pd.read_csv(file_fake_name,delimiter=' ', names=['X','Y','TStamp','Pres','EndPts'], header=None, skiprows=2)\nfile_fake_size=len(file_fake)\n\n\nfile_sigpad_name = prefix + 'sigpad.sig'\nfile_sigpad=pd.read_csv(file_sigpad_name,delimiter=' ', names=['X','Y','TStamp','Pres','EndPts'], header=None)\nfile_sigpad_size=len(file_sigpad)", "_____no_output_____" ], [ "startTime = file['TStamp'][0]\nfile['TStamp2'] = (file['TStamp'] - startTime) #ms\n\nstartTime = file3['TStamp'][0]\nfile3['TStamp2'] = (file3['TStamp'] - startTime) #ms\n#数据对比来说,点数相对网上例子,少一半,也不算太差。\n\nstartTime = file_fake['TStamp'][0]\nfile_fake['TStamp2'] = (file_fake['TStamp'] - startTime) #ms\n\nstartTime = file_sigpad['TStamp'][0]\nfile_sigpad['TStamp2'] = (file_sigpad['TStamp'] - startTime) #ms\n\n# file3", "_____no_output_____" ], [ "startTime = file2['TStamp'][0]\nfile2['TStamp2'] = (file2['TStamp'] - startTime) #ms", "_____no_output_____" ], [ "startTime = file3['TStamp'][0]\nfile3['TStamp2'] = (file3['TStamp'] - startTime) #ms", "_____no_output_____" ], [ "fig = plt.figure(figsize=[20,7])\n\n#整理前的两张图片对比\nax1 = fig.add_subplot(2, 3, 1)\nfile.plot.scatter(x = \"Y\", y = \"X\", ax=ax1, marker='o',c='', edgecolors='g', ylim = (0, 1000))\nfile3.plot.scatter(x = \"Y\", y = \"X\", ax=ax1, marker='o',c='', edgecolors='r', ylim = (0, 1000))\nax2 = fig.add_subplot(2, 3, 2)\nfile.plot.scatter(x='TStamp2', y='X', ax=ax2, marker='o',c='', edgecolors='g')\nfile3.plot.scatter(x = \"TStamp2\", y = \"X\", ax=ax2, marker='o',c='', edgecolors='r')\n\nax3 = plt.subplot(2, 3, 3)\nfile.plot.scatter(x='TStamp2', y='Y', ax=ax3, marker='o',c='', edgecolors='g')\n\nax4 = fig.add_subplot(2, 3, 4)\nfile3.plot.scatter(x = \"Y\", y = \"X\", ax=ax4, marker='o',c='', edgecolors='g', ylim = (0, 1000))\nax5 = fig.add_subplot(2, 3, 5)\nfile3.plot.scatter(x='TStamp2', y='X', ax=ax5, marker='o',c='', edgecolors='g')\nax6 = plt.subplot(2, 3, 6)\nfile3.plot.scatter(x='TStamp2', y='Y', ax=ax6, marker='o',c='', edgecolors='g')\n#可以清晰的发现,扫描的点不够,不能完整反映图片", "_____no_output_____" ], [ "fig = plt.figure(figsize=[20, 30])\nfile_sigpad.plot.scatter(x='Y', y='X', marker='o',c='', edgecolors='g')", "_____no_output_____" ], [ "fig = plt.figure(figsize=[20, 
30])\nfile.plot.scatter(x='Y', y='X', marker='o',c='', edgecolors='g')", "_____no_output_____" ], [ "# ##Preprocessing\n# P=[]\n# V=[]\n# SDX=[]\n# SDY=[]\n# fileP = file3\n\n# file_size=len(fileP)\n# X=fileP['X']\n# Y=fileP['Y']\n# TS=fileP['TStamp2']\n# BS=fileP['EndPts']\n# # AZ=file['AZ']\n# # AL=file['AL']\n# # P=file['P']\n# aX=sum(X)/file_size\n# aY=sum(Y)/file_size\n# for k in range(0,file_size-1):\n# if TS[k]==TS[k+1]:\n# X[k+1]=(X[k]+X[k+1])/2\n# Y[k+1]=(Y[k]+Y[k+1])/2\n# TS[k+1]=(TS[k]+1)\n# BS[k+1]=(BS[k]+BS[k+1])/2\n# # AZ[k+1]=(AZ[k]+AZ[k+1])/2\n# # AL[k+1]=(AL[k]+AL[k+1])/2\n# # P[k+1]=(P[k]+P[k+1])/2\n# if k<file_size-1:\n# V.append(((math.sqrt((X[k+1]-X[k])**2+(Y[k+1]-Y[k])**2))*(TS[file_size-1]-TS[0]))/(TS[k+1]-TS[k]))\n# SDX.append((X[k]-aX)**2)\n# SDY.append((Y[k]-aY)**2)\n# SDX.append((X[file_size-1]-aX)**2)\n# SDY.append((Y[file_size-1]-aY)**2)\n# V.append(0)\n\n\n# # data={'X':X,'Y':Y,'TS':TS,'BS':BS,'AZ':AZ,'AL':AL,'P':P,'V':V,'SDX':SDX,'SDY':SDY}\n# data={'X':X,'Y':Y,'TStamp2':TS,'EndPts':BS,'P':P,'V':V,'SDX':SDX,'SDY':SDY}", "_____no_output_____" ], [ "# fig = plt.figure(figsize=[6,4])\n# # plt.scatter(x = data[\"Y\"], y = data[\"V\"], marker='o',c='', edgecolors='g')\n# plt.scatter(x = list(range(0,len(file3), 1)), y = data[\"V\"], marker='o',c='', edgecolors='g')\n\n# # plt.bar(list(range(0,len(data1), 1)), data['V'])\n# # print(data)", "_____no_output_____" ], [ "fig = plt.figure(figsize=[20, 30])\nfile3.plot.scatter(x='Y', y='X', marker='o',c='', edgecolors='g')", "_____no_output_____" ], [ "intervals = []\nfor index, row in file.iterrows():\n if index == 0:\n intervals.append(0)\n continue\n# print(file['TStamp2'][index-1])\n interval = file['TStamp2'][index]-file['TStamp2'][index-1]\n if interval > 300:\n continue\n intervals.append(interval) \n# intervals = np.array(intervals)\n\n\nfig = plt.figure(figsize=[20, 10])\nplt.bar(range(len(intervals)), intervals)\n\n# 显示横轴标签\nplt.xlabel(\"Point sequence\")\n# 显示纵轴标签\nplt.ylabel(\"Time interval(ms)\")\n# 显示图标题\nplt.title(\"Histogram\")\nfig.show() # it is clearly shown that the intervals art not the same.", "_____no_output_____" ], [ "def remove_duplicated_point(df):\n df_new = df.drop(index=df.index) \n old_x = df['X'][0]\n old_y = df['Y'][0]\n for index, row in df.iterrows():\n if row['X'] != old_x or row['Y']!=old_y or index == 0:\n df_new.loc[len(df_new)] = {'X': row['X'], 'Y': row['Y'], 'TStamp': row['TStamp'], 'Pres': row['Pres'], 'EndPts': row['EndPts'],\n 'TStamp2': row['TStamp2']}\n old_x = row['X']\n old_y = row['Y']\n # update the EndPts if the point is \n elif row['X'] == old_x and row['Y'] == old_y and row['EndPts'] == 1:\n df_new.iloc[len(df_new)-1]['EndPts'] = 1\n return df_new", "_____no_output_____" ], [ "\"\"\"\nThe Ramer-Douglas-Peucker algorithm roughly ported from the pseudo-code provided\nby http://en.wikipedia.org/wiki/Ramer-Douglas-Peucker_algorithm\n\"\"\"\n\nfrom math import sqrt\n\ndef distance(a, b):\n return sqrt((a[0] - b[0]) ** 2 + (a[1] - b[1]) ** 2)\n\ndef point_line_distance(point, start, end):\n if (start == end):\n return distance(point, start)\n else:\n n = abs(\n (end[0] - start[0]) * (start[1] - point[1]) - (start[0] - point[0]) * (end[1] - start[1])\n )\n d = sqrt(\n (end[0] - start[0]) ** 2 + (end[1] - start[1]) ** 2\n )\n return n / d\n\ndef rdp(points, epsilon):\n \"\"\"\n Reduces a series of points to a simplified version that loses detail, but\n maintains the general shape of the series.\n \"\"\"\n dmax = 0.0\n index = 0\n for i in range(1, len(points) - 1):\n d = 
point_line_distance(points[i], points[0], points[-1])\n if d > dmax:\n index = i\n dmax = d\n if dmax >= epsilon:\n results = rdp(points[:index+1], epsilon)[:-1] + rdp(points[index:], epsilon)\n else:\n results = [points[0], points[-1]]\n return results\n\n\ndef rdp_precoss(df):\n l = []\n for i in range(0, df.shape[0]):\n l.append((df.loc[i, 'X'], df.loc[i, 'Y'], df.loc[i, 'TStamp'], df.loc[i, 'Pres'], df.loc[i, 'EndPts'], df.loc[i, 'TStamp2'] ))\n\n final = rdp(l, 0.000000001)\n df = pd.DataFrame(final, columns=['X', 'Y', 'TStamp', 'Pres', 'EndPts', 'TStamp2']) \n return df", "_____no_output_____" ], [ "intervals = []\nfor index, row in file.iterrows():\n if index == 0:\n intervals.append(0)\n continue\n# print(file['TStamp2'][index-1])\n interval = file['TStamp2'][index]-file['TStamp2'][index-1]\n if interval > 300:\n continue\n intervals.append(interval) \n# intervals = np.array(intervals)\n\nprint(len(intervals))\nfig = plt.figure(figsize=[20, 10])\nplt.bar(range(len(intervals)), intervals)\n\n# 显示横轴标签\nplt.xlabel(\"Point sequence\")\n# 显示纵轴标签\nplt.ylabel(\"Time interval(ms)\")\n# 显示图标题\nplt.title(\"Histogram\")\nfig.show() # it is clearly shown that the intervals art not the same", "276\n" ], [ "# Cubic-Spline to add points in stroke and make the curves smooth\ndef interpolate_points(df):\n plt.rcParams['font.sans-serif']=['SimHei'] #用来正常显示中文标签\n plt.rcParams['axes.unicode_minus']=False #用来正常显示负号\n\n df_new = df.loc[df[\"EndPts\"] == 1]\n # 按手指离开屏幕的点,分笔触差值,因为在笔触间差值没有意义\n # 寻找断点\n EndPts = [0]\n EndPts.extend(list(df_new.index))\n EndPts_len = len(EndPts)\n if EndPts_len < 2:\n EndPts.append(len(df)-2) #如果没有就设最后一个点为离开屏幕的点\n EndPts_len += 1\n print(EndPts) \n i = 0\n \n new_x_all = []\n iy3_x_all = []\n iy3_y_all = []\n while i < EndPts_len - 1:\n #data preparation\n start = EndPts[i]\n end = EndPts[i+1] + 1\n if start == 0:\n X= np.array(df['TStamp2'][0:end].tolist())\n Y= np.array(df['X'][0:end].tolist())\n Y2= np.array(df['Y'][0:end].tolist())\n if df['TStamp2'][start+1:end].max() - df['TStamp2'][start+1:end].min() <=15 or end - start <= 6:\n i+=1\n continue \n new_x=np.arange(0,df['TStamp2'][0:end].max(),7) #define interpolate points\n else: \n X= np.array(df['TStamp2'][start+1:end].tolist())\n Y= np.array(df['X'][start+1:end].tolist())\n Y2= np.array(df['Y'][start+1:end].tolist())\n if df['TStamp2'][start+1:end].max() - df['TStamp2'][start+1:end].min() <=15 or end - start <= 6:\n i+=1\n continue \n new_x=np.arange(df['TStamp2'][start+1:end].min(),df['TStamp2'][start+1:end].max(),7) #define interpolate points \n\n # #进行一阶样条插值\n # ipo1=spi.splrep(X,Y,k=1,s=10) #样本点导入,生成参数\n # iy1=spi.splev(new_x,ipo1) #根据观测点和样条参数,生成插值\n\n #进行三次样条拟合\n ipo3=spi.splrep(X,Y,k=3, s=1000) #样本点导入,生成参数\n iy3=spi.splev(new_x,ipo3) #根据观测点和样条参数,生成插值\n ipo3_y=spi.splrep(X,Y2,k=3, s=1000) #样本点导入,生成参数\n iy3_y=spi.splev(new_x,ipo3_y) #根据观测点和样条参数,生成插值 \n new_x_all.extend(new_x)\n iy3_x_all.extend(iy3)\n iy3_y_all.extend(iy3_y)\n i += 1\n\n print(len(iy3_x_all)) \n X_all= np.array(df['TStamp2'].tolist())\n Y_all = np.array(df['X'].tolist())\n Y2_all = np.array(df['Y'].tolist())\n \n ##作图\n fig,(ax1,ax2)=plt.subplots(2,1,figsize=(10,12))\n\n ax1.plot(X_all,Y2_all,'o',label='样本点')\n ax1.plot(new_x_all,iy3_y_all,'*-',label='插值点')\n ax1.set_ylim(Y2_all.min()-20,Y2_all.max()+20)\n ax1.set_ylabel('Y坐标')\n ax1.set_title('Y三次线性插值')\n ax1.legend()\n\n ax2.plot(X_all,Y_all,'o',label='样本点')\n ax2.plot(new_x_all,iy3_x_all,'*-',label='插值点')\n ax2.set_ylim(Y_all.min()-20,Y_all.max()+20)\n ax2.set_ylabel('X坐标')\n 
ax2.set_title('X三次样条插值')\n ax2.legend() \n fig.show()\n \n# df_new = df.loc[df[\"EndPts\"] == 1]\n# result = pd.concat(frames)\n# df_new.index = range(len(df_new.index))\n df_new2 = df.drop(index=df.index)\n for i, val in enumerate(new_x_all):\n# if val in df['TStamp2'].tolist():\n# continue\n# if val in df['TStamp2'].tolist():\n# continue\n df_new2.loc[len(df_new2)] = {'X': iy3_x_all[i], 'Y': iy3_y_all[i], 'TStamp': 0, 'Pres': 0, 'EndPts': 0,'TStamp2': val}\n \n df_new = pd.concat([df_new, df_new2])\n df_new = df_new.sort_values(by=['TStamp2'])\n df_new.index = range(len(df_new.index))\n# print(df_new)\n return df_new\n# return df\n# df_new = remove_duplicated_point(df_new)\n \n# fig = plt.figure(figsize=[10,6])\n# plt.plot( df_new['TStamp2'], df_new[\"X\"],'c*-')\n# fig.show()\n# fig = plt.figure(figsize=[10,6])\n# plt.plot( df['TStamp2'], df[\"X\"],'c*-')\n# fig.show() \n\nfile = remove_duplicated_point(file) # use RDP algorithm to remove duplicated points\nfile3 = remove_duplicated_point(file3) # use RDP algorithm to remove duplicated points\nfile_fake = remove_duplicated_point(file_fake) # use RDP algorithm to remove duplicated points\n\n# file = rdp_precoss(file) # use RDP algorithm to remove duplicated points\n# file3 = rdp_precoss(file3) # use RDP algorithm to remove duplicated points\n# file_fake = rdp_precoss(file_fake) # use RDP algorithm to remove duplicated points\n\nfile = interpolate_points(file)\nfile3 = interpolate_points(file3)\nfile_fake = interpolate_points(file_fake)\n\n\n", "[0, 144, 253]\n227\n[0, 143, 193, 231]\n204\n[0, 154, 206, 235]\n206\n" ], [ " from decimal import Decimal\ndef get_gravity_point(points):\n \"\"\"\n @brief 获取多边形的重心点\n @param points The points\n @return The center of gravity point.\n \"\"\"\n if len(points) <= 2:\n return list()\n\n area = Decimal(0.0)\n x, y = Decimal(0.0), Decimal(0.0)\n for i in range(len(points)):\n lng = Decimal(points[i][0].item())\n lat = Decimal(points[i][1].item())\n nextlng = Decimal(points[i-1][0].item())\n nextlat = Decimal(points[i-1][1].item())\n\n tmp_area = (nextlng*lat - nextlat*lng)/Decimal(2.0)\n area += tmp_area\n x += tmp_area*(lng+nextlng)/Decimal(3.0)\n y += tmp_area*(lat+nextlat)/Decimal(3.0)\n x = x/area\n y = y/area\n return [float(x), float(y)]", "_____no_output_____" ], [ "# 求两直线夹角\ndef get_angle_betw_lines(x1, y1, x2, y2, x3, y3, x4, y4):\n k1 = (y2-y1)/(float(x2-x1))\n k2 = (y4-y3)/(float(x4-x3))\n Cobb = math.fabs(np.arctan((k1-k2)/(float(1 + k1*k2)))*180/np.pi)+0.5\n return Cobb\n\ndef get_grivity_angle(P):\n points_left = []\n points_right = []\n for point in P.exterior.coords:\n if point[0] <= P.centroid.x:\n points_left.append([point[0], point[1]])\n else:\n points_right.append([point[0], point[1]])\n\n P_left = Polygon(points_left)\n P_right = Polygon(points_right) \n\n\n# print( P_left.centroid)\n theta = get_angle_betw_lines( P_left.centroid.x, P_left.centroid.y, P_right.centroid.x, P_right.centroid.y, \n 0,0, 10,0) \n return theta\n\n\n\n#rotate for Polygon\ndef rotate_polygon(polygon, angle, center_point=(0, 0)):\n \"\"\"Rotates the given polygon which consists of corners represented as (x,y)\n around center_point (origin by default)\n Rotation is counter-clockwise\n Angle is in degrees\n \"\"\"\n rotated_polygon = []\n for corner in polygon.exterior.coords:\n rotated_corner = rotate_point(corner, angle, center_point)\n rotated_polygon.append(rotated_corner)\n rotated_polygon = Polygon(rotated_polygon) \n return rotated_polygon\n\ndef rotate_point(point, angle, center_point=(0, 0)):\n 
\"\"\"Rotates a point around center_point(origin by default)\n Angle is in degrees.\n Rotation is counter-clockwise\n \"\"\"\n angle_rad = radians(angle % 360)\n # Shift the point so that center_point becomes the origin\n new_point = (point[0] - center_point[0], point[1] - center_point[1])\n new_point = (new_point[0] * cos(angle_rad) - new_point[1] * sin(angle_rad),\n new_point[0] * sin(angle_rad) + new_point[1] * cos(angle_rad))\n # Reverse the shifting we have done\n new_point = (new_point[0] + center_point[0], new_point[1] + center_point[1])\n return new_point\n\n\n\n# gravity_x, gravity_y = gravity_normalize(file)\n# print(gravity_x, gravity_y)\n\nfrom shapely.geometry import Polygon\n\ndef rotate_graphic(file):\n points_left = []\n points_right = []\n points = []\n\n for index, row in file.iterrows():\n if abs(row['X']) > 100000 or abs(row['Y']) > 100000:\n continue\n points.append([row['X'], row['Y']])\n\n \n \n P = Polygon(points)\n print(P.centroid)\n plt.figure()\n plt.plot(P.centroid.y,P.centroid.x,\"rx\") \n plt.show() # if you need... \n \n theta = get_grivity_angle(P)\n theta_old = theta\n print(theta)\n \n if theta >6 and get_grivity_angle(rotate_polygon(P, theta_old/24, (P.centroid.x, P.centroid.y))) < theta: \n while theta > 6.0 :\n# print(\"here\")\n # if theta <= 90: # clock-wise 顺时针\n P = rotate_polygon(P, theta_old/24, (P.centroid.x, P.centroid.y)) \n theta = get_grivity_angle(P)\n elif theta >6 and get_grivity_angle(rotate_polygon(P, -theta_old/24, (P.centroid.x, P.centroid.y))) < theta_old:\n while theta > 6.0 :\n P = rotate_polygon(P, -theta_old/24, (P.centroid.x, P.centroid.y)) \n theta = get_grivity_angle(P) \n \n P_rotated = P\n P_rotated_points_x = []\n P_rotated_points_y = []\n for point in P_rotated.exterior.coords:\n P_rotated_points_x.append(point[0]) \n P_rotated_points_y.append(point[1]) \n\n print(P_rotated.centroid)\n\n\n\n\n fig = plt.figure(figsize=[6,4])\n plt.scatter(x = file[\"Y\"], y = file[\"X\"], marker='o',c='', edgecolors='g')\n\n# plt.plot(gravity_y, gravity_x, 'rx')\n# plt.plot(P_left.centroid.y, P_left.centroid.x, 'rx')\n# plt.plot(P_right.centroid.y, P_right.centroid.x, 'rx')\n\n plt.scatter(x = P_rotated_points_y, y = P_rotated_points_x, marker='o',c='', edgecolors='b')\n\n points_left = []\n points_right = []\n for point in P_rotated.exterior.coords:\n if point[0] <= P_rotated.centroid.x:\n points_left.append([point[0], point[1]])\n else:\n points_right.append([point[0], point[1]])\n\n P_left = Polygon(points_left)\n P_right = Polygon(points_right) \n plt.plot(P_left.centroid.y, P_left.centroid.x, 'bx')\n plt.plot(P_right.centroid.y, P_right.centroid.x, 'bx')\n \n\n theta = get_angle_betw_lines( P_left.centroid.x, P_left.centroid.y, P_right.centroid.x, P_right.centroid.y, \n 0,0, 10,0)\n print(theta)\n fig.show()\n\n file['X'] = P_rotated_points_x[0:-1]\n file['Y'] = P_rotated_points_y[0:-1]\n return file\n\n# file = rotate_graphic(file)\n# file3 = rotate_graphic(file3)\n# file_fake = rotate_graphic(file_fake)", "_____no_output_____" ], [ "# # 求两直线夹角\n# def get_angle_betw_lines(x1, y1, x2, y2, x3, y3, x4, y4):\n# k1 = (y2-y1)/(float(x2-x1))\n# k2 = (y4-y3)/(float(x4-x3))\n# Cobb = math.fabs(np.arctan((k1-k2)/(float(1 + k1*k2)))*180/np.pi)+0.5\n# return Cobb\n\n\n# def get_grivity_angle(P):\n# points_left = []\n# points_right = []\n# points_x = []\n# for point in P.exterior.coords:\n# points_x.append(point[0]) \n# # points_y.append(point[1])\n \n# for point in P.exterior.coords:\n# if point[0] <= np.mean(points_x):\n# 
points_left.append([point[0], point[1]])\n# else:\n# points_right.append([point[0], point[1]])\n \n# left_xs, left_ys = zip(*points_left) #create lists of x and y values\n# left_xs = list(left_xs)\n# left_ys = list(left_ys)\n# right_xs, right_ys = zip(*points_right) #create lists of x and y values\n# right_xs = list(right_xs)\n# right_ys = list(right_ys) \n# # print( P_left.centroid)\n# theta = get_angle_betw_lines( np.mean(left_xs), np.mean(left_ys), np.mean(right_xs), np.mean(right_xs), \n# 0,0, 10,0) \n# return theta\n\n# #rotate for Polygon\n# def rotate_polygon(polygon, angle, center_point=(0, 0)):\n# \"\"\"Rotates the given polygon which consists of corners represented as (x,y)\n# around center_point (origin by default)\n# Rotation is counter-clockwise\n# Angle is in degrees\n# \"\"\"\n# rotated_polygon = []\n# for corner in polygon.exterior.coords:\n# rotated_corner = rotate_point(corner, angle, center_point)\n# rotated_polygon.append(rotated_corner)\n# rotated_polygon = Polygon(rotated_polygon) \n# return rotated_polygon\n\n# def rotate_point(point, angle, center_point=(0, 0)):\n# \"\"\"Rotates a point around center_point(origin by default)\n# Angle is in degrees.\n# Rotation is counter-clockwise\n# \"\"\"\n# angle_rad = radians(angle % 360)\n# # Shift the point so that center_point becomes the origin\n# new_point = (point[0] - center_point[0], point[1] - center_point[1])\n# new_point = (new_point[0] * cos(angle_rad) - new_point[1] * sin(angle_rad),\n# new_point[0] * sin(angle_rad) + new_point[1] * cos(angle_rad))\n# # Reverse the shifting we have done\n# new_point = (new_point[0] + center_point[0], new_point[1] + center_point[1])\n# return new_point\n\n\n\n# # gravity_x, gravity_y = gravity_normalize(file)\n# # print(gravity_x, gravity_y)\n\n# from shapely.geometry import Polygon\n\n# def rotate_graphic(file):\n# points_left = []\n# points_right = []\n# points = []\n\n# for index, row in file.iterrows():\n# if abs(row['X']) > 100000 or abs(row['Y']) > 100000:\n# continue\n# points.append([row['X'], row['Y']])\n\n \n \n# P = Polygon(points)\n# print(P.centroid)\n# xs, ys = zip(*points) #create lists of x and y values\n# xs = list(xs)\n# ys = list(ys)\n# print(max(xs),min(xs),max(ys),min(ys))\n# plt.figure()\n# plt.plot(ys,xs) \n# plt.plot(np.mean(ys),np.mean(xs),\"rx\") \n \n# plt.show() # if you need...\n# print(np.mean(xs),np.mean(ys))\n \n \n# theta = get_grivity_angle(P)\n# theta_old = theta \n \n# print(\"theta_old:\", theta_old)\n# if theta >6 and get_grivity_angle(rotate_polygon(P, theta_old/24, (np.mean(xs), np.mean(ys)))) < theta: \n# while theta > 6.0 :\n# # print(\"here\")\n# # if theta <= 90: # clock-wise 顺时针\n# P = rotate_polygon(P, theta_old/24, (np.mean(xs), np.mean(ys))) \n# theta = get_grivity_angle(P)\n# print(theta)\n# elif theta >6 and get_grivity_angle(rotate_polygon(P, -theta_old/24, (np.mean(xs), np.mean(ys)))) < theta_old:\n# while theta > 6.0 :\n# P = rotate_polygon(P, -theta_old/24, (np.mean(xs), np.mean(ys))) \n# theta = get_grivity_angle(P) \n \n# P_rotated = P\n# P_rotated_points_x = []\n# P_rotated_points_y = []\n# for point in P_rotated.exterior.coords:\n# P_rotated_points_x.append(point[0]) \n# P_rotated_points_y.append(point[1]) \n\n# print(P_rotated.centroid)\n# print(theta,\"here\")\n\n\n\n\n# fig = plt.figure(figsize=[6,4])\n# plt.scatter(x = file[\"Y\"], y = file[\"X\"], marker='o',c='', edgecolors='g')\n\n# # plt.plot(gravity_y, gravity_x, 'rx')\n# # plt.plot(P_left.centroid.y, P_left.centroid.x, 'rx')\n# # plt.plot(P_right.centroid.y, 
P_right.centroid.x, 'rx')\n\n# plt.scatter(x = P_rotated_points_y, y = P_rotated_points_x, marker='o',c='', edgecolors='b')\n\n# points_left = []\n# points_right = []\n# for point in P_rotated.exterior.coords:\n# if point[0] <= P_rotated.centroid.x:\n# points_left.append([point[0], point[1]])\n# else:\n# points_right.append([point[0], point[1]])\n\n# P_left = Polygon(points_left)\n# P_right = Polygon(points_right)\n\n# left_xs, left_ys = zip(*points_left) #create lists of x and y values\n# left_xs = list(left_xs)\n# left_ys = list(left_ys)\n# right_xs, right_ys = zip(*points_right) #create lists of x and y values\n# right_xs = list(right_xs)\n# right_ys = list(right_ys) \n \n# plt.plot(np.mean(left_xs), np.mean(left_ys), 'rx')\n# plt.plot(np.mean(right_xs), np.mean(right_ys), 'rx')\n \n\n# # theta = get_angle_betw_lines( P_left.centroid.x, P_left.centroid.y, P_right.centroid.x, P_right.centroid.y, \n# # 0,0, 10,0)\n# # print(theta)\n# fig.show()\n\n# # file['X'] = P_rotated_points_x[0:-1]\n# # file['Y'] = P_rotated_points_y[0:-1]\n# return file\n\n# file = rotate_graphic(file)\n# # file3 = rotate_graphic(file3)\n# # file_fake = rotate_graphic(file_fake)", "_____no_output_____" ], [ "# add columns\nfile['normalX'] = file['X']\nfile['normalY'] = file['Y']\nfile3['normalX'] = file3['X']\nfile3['normalY'] = file3['Y']\nfile_fake['normalX'] = file_fake['X']\nfile_fake['normalY'] = file_fake['Y']", "_____no_output_____" ], [ "# Size normalization 大小规整\n# position normalization 位置规整\ndef normalizeFile(normalInputFile):\n widthX = 200 #width\n heightY = 500 #height\n minX = normalInputFile['X'].min()\n minY = normalInputFile['Y'].min()\n maxX = normalInputFile['X'].max()\n maxY = normalInputFile['Y'].max()\n normalInputFile['normalX'] = widthX * ((normalInputFile['X'] - minX)/(maxX - minX))\n normalInputFile['normalY'] = heightY * ((normalInputFile['Y'] - minY)/(maxY - minY))\n \n #position normalization\n averX = normalInputFile['normalX'].mean()\n averY = normalInputFile['normalY'].mean()\n\n normalInputFile['normalX'] = normalInputFile['normalX'] - averX\n normalInputFile['normalY'] = normalInputFile['normalY'] - averY\n return normalInputFile\n\n# # Gravity normalization 重心规整\n# def gravity_normalize(df):\n# points = []\n# for index, row in df.iterrows():\n# points.append([row['normalX'], row['normalY']])\n \n# return get_centerpoint(points)\n\n\n# def get_centerpoint(lis):\n# area = 0.0\n# x,y = 0.0,0.0\n \n# a = len(lis)\n# for i in range(a):\n# lat = lis[i][0] #weidu\n# lng = lis[i][1] #jingdu\n \n# if i == 0:\n# lat1 = lis[-1][0]\n# lng1 = lis[-1][1]\n \n# else:\n# lat1 = lis[i-1][0]\n# lng1 = lis[i-1][1]\n \n# fg = (lat*lng1 - lng*lat1)/2.0\n \n# area += fg\n# x += fg*(lat+lat1)/3.0\n# y += fg*(lng+lng1)/3.0\n \n# x = x/area\n# y = y/area\n \n# return x,y", "_____no_output_____" ], [ "file = normalizeFile(file)\nfile3 = normalizeFile(file3)\nfile_fake = normalizeFile(file_fake)\n\n\n#整理后的两张图片对比\nfig = plt.figure(figsize=[6,4])\nplt.scatter(x = file[\"normalY\"], y = file[\"normalX\"], marker='o',c='', edgecolors='g')\nplt.scatter(x= file3['normalY'], y= file3['normalX'] , marker='o',c='', edgecolors='r')\nplt.scatter(x= file_fake['normalY'], y= file_fake['normalX'] , marker='o',c='', edgecolors='b')\n\nfig.show()\n", "_____no_output_____" ], [ "# # 求两直线夹角\n# def get_angle_betw_lines(x1, y1, x2, y2, x3, y3, x4, y4):\n# k1 = (y2-y1)/(float(x2-x1))\n# k2 = (y4-y3)/(float(x4-x3))\n# Cobb = math.fabs(np.arctan((k1-k2)/(float(1 + k1*k2)))*180/np.pi)+0.5\n# return Cobb\n\n# def 
get_grivity_angle(P):\n# points_left = []\n# points_right = []\n# for point in P.exterior.coords:\n# if point[0] <= P.centroid.x:\n# points_left.append([point[0], point[1]])\n# else:\n# points_right.append([point[0], point[1]])\n\n# P_left = Polygon(points_left)\n# P_right = Polygon(points_right) \n# # plt.plot(P_left.centroid.y, P_left.centroid.x, 'bx')\n# # plt.plot(P_right.centroid.y, P_right.centroid.x, 'bx')\n\n# theta = get_angle_betw_lines( P_left.centroid.x, P_left.centroid.y, P_right.centroid.x, P_right.centroid.y, \n# 0,0, 10,0)\n# return theta\n\n\n# #rotate for Polygon\n# def rotate_polygon(polygon, angle, center_point=(0, 0)):\n# \"\"\"Rotates the given polygon which consists of corners represented as (x,y)\n# around center_point (origin by default)\n# Rotation is counter-clockwise\n# Angle is in degrees\n# \"\"\"\n# rotated_polygon = []\n# for corner in polygon.exterior.coords:\n# rotated_corner = rotate_point(corner, angle, center_point)\n# rotated_polygon.append(rotated_corner)\n# rotated_polygon = Polygon(rotated_polygon) \n# return rotated_polygon\n\n# def rotate_point(point, angle, center_point=(0, 0)):\n# \"\"\"Rotates a point around center_point(origin by default)\n# Angle is in degrees.\n# Rotation is counter-clockwise\n# \"\"\"\n# angle_rad = radians(angle % 360)\n# # Shift the point so that center_point becomes the origin\n# new_point = (point[0] - center_point[0], point[1] - center_point[1])\n# new_point = (new_point[0] * cos(angle_rad) - new_point[1] * sin(angle_rad),\n# new_point[0] * sin(angle_rad) + new_point[1] * cos(angle_rad))\n# # Reverse the shifting we have done\n# new_point = (new_point[0] + center_point[0], new_point[1] + center_point[1])\n# return new_point\n\n\n\n# # gravity_x, gravity_y = gravity_normalize(file)\n# # print(gravity_x, gravity_y)\n\n# from shapely.geometry import Polygon\n\n# def rotate_graphic(file):\n# points_left = []\n# points_right = []\n# points = []\n\n# for index, row in file.iterrows():\n# points.append([row['normalX'], row['normalY']])\n\n# P = Polygon(points)\n# print(P.centroid)\n\n\n# # for index, row in file.iterrows():\n# # if row['normalX'] <= P.centroid.x:\n# # points_left.append([row['normalX'], row['normalY']])\n# # else:\n# # points_right.append([row['normalX'], row['normalY']])\n\n# # P_left = Polygon(points_left)\n# # P_right = Polygon(points_right)\n# # #print(P_left.centroid, P_right.centroid)\n\n# # theta = get_angle_betw_lines( P_left.centroid.x, P_left.centroid.y, P_right.centroid.x, P_right.centroid.y, \n# # 0,0, 10,0)\n# theta = get_grivity_angle(P)\n# theta_old = theta\n# print(theta)\n \n \n# if theta >6 and get_grivity_angle(rotate_polygon(P, theta_old/24, (P.centroid.x, P.centroid.y))) < theta: \n# while theta > 6.0 :\n# # print(\"here\")\n# # if theta <= 90: # clock-wise 顺时针\n# P = rotate_polygon(P, theta_old/24, (P.centroid.x, P.centroid.y)) \n# theta = get_grivity_angle(P)\n# elif theta >6 and get_grivity_angle(rotate_polygon(P, -theta_old/24, (P.centroid.x, P.centroid.y))) < theta_old:\n# while theta > 6.0 :\n# P = rotate_polygon(P, -theta_old/24, (P.centroid.x, P.centroid.y)) \n# theta = get_grivity_angle(P) \n \n# P_rotated = P\n# P_rotated_points_x = []\n# P_rotated_points_y = []\n# for point in P_rotated.exterior.coords:\n# P_rotated_points_x.append(point[0]) \n# P_rotated_points_y.append(point[1]) \n\n# print(P_rotated.centroid)\n\n\n\n\n# fig = plt.figure(figsize=[6,4])\n# plt.scatter(x = file[\"normalY\"], y = file[\"normalX\"], marker='o',c='', edgecolors='g')\n\n# # plt.plot(gravity_y, 
gravity_x, 'rx')\n# # plt.plot(P_left.centroid.y, P_left.centroid.x, 'rx')\n# # plt.plot(P_right.centroid.y, P_right.centroid.x, 'rx')\n\n# plt.scatter(x = P_rotated_points_y, y = P_rotated_points_x, marker='o',c='', edgecolors='b')\n\n# points_left = []\n# points_right = []\n# for point in P_rotated.exterior.coords:\n# if point[0] <= P_rotated.centroid.x:\n# points_left.append([point[0], point[1]])\n# else:\n# points_right.append([point[0], point[1]])\n\n# P_left = Polygon(points_left)\n# P_right = Polygon(points_right) \n# plt.plot(P_left.centroid.y, P_left.centroid.x, 'bx')\n# plt.plot(P_right.centroid.y, P_right.centroid.x, 'bx')\n\n# theta = get_angle_betw_lines( P_left.centroid.x, P_left.centroid.y, P_right.centroid.x, P_right.centroid.y, \n# 0,0, 10,0)\n# print(theta)\n# # plt.plot(gravity_x, gravity_y, 'bx')\n\n\n# fig.show()\n\n# file['normalX'] = P_rotated_points_x[0:-1]\n# file['normalY'] = P_rotated_points_y[0:-1]\n# return file\n\n# file = rotate_graphic(file)\n# file3 = rotate_graphic(file3)\n\n", "_____no_output_____" ], [ "#the plot before length normalization\nfig = plt.figure(figsize=[9,6])\nplt.plot(file[\"TStamp2\"], file[\"normalX\"],'c*-', )\nplt.plot(file3['TStamp2'], file3['normalX'] , 'm.-.')\nplt.plot(file_fake['TStamp2'], file_fake['normalX'] , 'r.-.')\n#we can see clearly, the length of two plots are not same", "_____no_output_____" ], [ "def samelen(data,length=400):\n#归一化标准长度为 400,若某一签名长度为 d,则需在此签名中每隔\n#(d-1)/400 个坐标点提取一个数据, 所取得的数据根据前后两个点进行线性提取 \n data_size = len(data)\n interval = (data_size-1)/length\n start = 0\n new_data = data.drop(index=data.index)\n for dist in np.arange(start, data_size-1, interval):\n first = math.floor(dist)\n second = math.ceil(dist)\n if second >= data_size:\n second = data_size -1\n percent = (dist - first) / 1.0 \n #'X','Y','TStamp','Pres','EndPts'\n# print ((data.X[first] + data.X[second])/2)\n\n# X = (data.at[first,'X'] + data.at[second,'X']) / 2\n# Y = (data.Y[first] + data.Y[second])/2 \n TStamp = (data.TStamp[first] + data.TStamp[second])/2 \n Pres = (data.Pres[first] + data.Pres[second])/2\n# normalX = (data.normalX[first] + data.normalX[second])/2\n# normalY = (data.normalY[first] + data.normalY[second])/2 \n# TStamp2 = (data.TStamp2[first] + data.TStamp2[second])/2\n \n X = data.at[first,'X'] + (data.at[second,'X'] - data.at[first,'X']) * percent\n Y = data.at[first,'Y'] + (data.at[second,'Y'] - data.at[first,'Y']) * percent\n normalX = data.at[first,'normalX'] + (data.at[second,'normalX'] - data.at[first,'normalX']) * percent\n normalY = data.at[first,'normalY'] + (data.at[second,'normalY'] - data.at[first,'normalY']) * percent \n TStamp2 = data.at[first,'TStamp2'] + (data.at[second,'TStamp2'] - data.at[first,'TStamp2']) * percent \n \n if data.EndPts[first] == 1.0 or data.EndPts[second] == 1.0:\n EndPts = 1\n else:\n EndPts = 0\n \n new_data.loc[len(new_data)] = {'X': X, 'Y': Y, 'TStamp': TStamp, 'Pres': Pres, 'EndPts': EndPts,'normalX': normalX,\n 'normalY': normalY, 'TStamp2': TStamp2}\n return new_data\n\n# d = (data[first] + data[second])/2\n# new_data.loc[len(new_data)] = d\n\ndata1 = file\ndata2 = file3\ndata3 = file_fake\ndata1 = samelen(file)\ndata2 = samelen(file3)\ndata3 = samelen(file_fake)\n\n\n# print (data1)\n# print (data1)\n# df = file \n# df=df.drop(index=df.index)\n# file.loc[len(file)] = 3\n\n# print (list(range(0,len(data1)-1, 1)))\n#the plot after length normalization\nfig = plt.figure(figsize=[8,5])\n# plt.plot(data1[\"TStamp2\"], data1[\"normalX\"],'c*-', )\n# plt.plot(data2['TStamp2'], 
data2['normalX'] , 'm.-.')\nplt.plot( list(range(0,len(data1), 1)), data1[\"normalX\"],'c*-')\nplt.plot( list(range(0,len(data2),1)), data2['normalX'] , 'm.-.')\n# plt.plot( list(range(0,len(data3),1)), data3['normalX'] , 'r.-.')\n#we can see clearly, the length of two plots are same", "_____no_output_____" ], [ "# def calc_extrme_points(data):\n# length=len(data)\n# data_extr_points ={}\n# data_extr_points['point'] = []\n# data_extr_points['value'] = [] \n# data_extr_points['type'] = [] \n \n# for i in range(5,length-5):\n# # local maximum point\n# local_list = list(range(i-5, i)) #如果之前5个点之内已经记录有点,则不记录\n# if data[i]>=data[i-1] and data[i]>=data[i-2] and data[i]>=data[i-3] and data[i]>=data[i-4] and data[i]>=data[i-5] and \\\n# data[i]>=data[i+1] and data[i]>=data[i+2] and data[i]>=data[i+3] and data[i]>=data[i+4] and data[i]>=data[i+5] and \\\n# len([j for j in local_list if j in data_extr_points['point'] ]) == 0:\n# data_extr_points['point'].append(i)\n# data_extr_points['value'].append(data[i])\n# data_extr_points['type'].append(1)\n# continue\n \n# # local minimum point \n# if data[i]<=data[i-1] and data[i]<=data[i-2] and data[i]<=data[i-3] and data[i]<=data[i-4] and data[i]<=data[i-5] and \\\n# data[i]<=data[i+1] and data[i]<=data[i+2] and data[i]<=data[i+3] and data[i]<=data[i+4] and data[i]<=data[i+5] and \\\n# len([j for j in local_list if j in data_extr_points['point'] ]) == 0:\n# data_extr_points['point'].append(i)\n# data_extr_points['value'].append(data[i])\n# data_extr_points['type'].append(0)\n \n# #add start,end point\n# if data_extr_points['type'][0] == 0:\n# start_type = 1\n# else:\n# start_type = 0\n# data_extr_points['point'].insert(0, 0)\n# data_extr_points['value'].insert(0, data[0])\n# data_extr_points['type'].insert(0, start_type)\n\n# if data_extr_points['type'][-1] == 0:\n# end_type = 1\n# else:\n# end_type = 0\n# data_extr_points['point'].append(length-1)\n# data_extr_points['value'].append(data[length-1])\n# data_extr_points['type'].append(end_type) \n \n# return data_extr_points\n\n\ndef calc_extrme_points(df):\n # 按手指离开屏幕的点,分笔触差值,因为在笔触间差值没有意义\n # 寻找断点\n EndPts = [0]\n EndPts.extend(list(df.loc[df[\"EndPts\"] == 1].index))\n EndPts_len = len(EndPts)\n# print(EndPts)\n if EndPts_len < 2:\n EndPts.append(len(df)-1) #如果没有就设最后一个点为离开屏幕的点\n EndPts_len += 1\n i = 0\n EndPts2 = []\n while i < EndPts_len - 1:\n start = EndPts[i]\n end = EndPts[i+1]\n if end - start <= 6:\n i += 1 \n continue\n EndPts2.append(start)\n i += 1\n \n if EndPts[len(EndPts)-1] - EndPts2[len(EndPts2)-1] > 6:\n EndPts2.append(EndPts[len(EndPts)-1])\n \n re = {}\n re['point'] = EndPts2\n return re \n\n# data1_extr_points = calc_extrme_points(data1['normalX'].tolist())\n# data2_extr_points = calc_extrme_points(data2['normalX'].tolist())\ndata1_extr_points = calc_extrme_points(data1)\ndata2_extr_points = calc_extrme_points(data2)\n\nprint(len(data1_extr_points['point']), len(data2_extr_points['point']))\nprint(data1_extr_points['point'])\nprint(data2_extr_points['point'])\ndata2_extr_points['point'] = [0, 249, 399]\n\n\nfig = plt.figure(figsize=[8,5])\n# print(data1.loc[data1[\"EndPts\"] == 1.0].head())\n# print(data1)\n\nplt.plot( list(range(0,len(data1), 1)), data1[\"normalX\"],'c*-')\nplt.plot( list(range(0,len(data2),1)), data2['normalX'] , 'm.-.')\n# plt.plot( data1_extr_points['point'], data1_extr_points['value'],'rx')\n# plt.plot( data2_extr_points['point'], data2_extr_points['value'],'rx')", "3 4\n[0, 226, 400]\n[0, 250, 335, 399]\n" ], [ "# calculate dtw distance by strokes\n\ndef 
get_stroke(df, extr_points, index):\n start = extr_points['point'][index]\n end = extr_points['point'][index+1]\n df_new = df.loc[start:end].copy()\n# print(df)\n# print(df_new)\n# df_new = normalizeFile(df_new)\n \n #set ts_a\n normalX = np.array(df_new['normalX'])\n normalY = np.array(df_new['normalY'])\n list(zip(normalX,normalY))\n ts_a = np.array(list(zip(normalX,normalY)),dtype=float)\n \n return ts_a, df_new\n\n\ntotal_distance = 0.0\nprint(len(data1_extr_points['point']), len(data2_extr_points['point']))\nif len(data1_extr_points['point']) == len(data2_extr_points['point']):\n i = 0\n while i< len(data1_extr_points['point']) -1:\n ts_a, df_stroke1 = get_stroke(data1, data1_extr_points, i)\n ts_b, df_stroke2 = get_stroke(data2, data2_extr_points, i)\n\n fig = plt.figure(figsize=[8,5])\n # plt.plot(data1[\"TStamp2\"], data1[\"normalX\"],'c*-', )\n # plt.plot(data2['TStamp2'], data2['normalX'] , 'm.-.')\n plt.plot( list(range(0,len(df_stroke1),1)),df_stroke1[\"normalX\"],'c*-')\n plt.plot( list(range(0,len(df_stroke2),1)),df_stroke2['normalX'],'m.-.')\n fig.show()\n distance, path = fastdtw(ts_a, ts_b, dist=euclidean)\n total_distance += distance\n# break\n i +=1\nprint(\"DTW distance by stroke: \", total_distance)\n ", "3 3\nDTW distance by stroke: 13953.140008069684\n" ], [ "#set ts_a\nnormalX = np.array(data1['normalX'])\nnormalY = np.array(data1['normalY'])\nlist(zip(normalX,normalY))\nts_a = np.array(list(zip(normalX,normalY)),dtype=float)\n# print(normalX)\n\n#set ts_b\nnormalX = np.array(data2['normalX'])\nnormalY = np.array(data2['normalY'])\nlist(zip(normalX,normalY))\nts_b = np.array(list(zip(normalX,normalY)),dtype=float)\n\n#set ts_c\nnormalX = np.array(data3['normalX'])\nnormalY = np.array(data3['normalY'])\nlist(zip(normalX,normalY))\nts_c = np.array(list(zip(normalX,normalY)),dtype=float)\n\n\n#the plot of DTW distance for genuine signature\ndistance, path = fastdtw(ts_a, ts_b, dist=euclidean)\nprint(\"genuine signature distance: \", distance)\n# print(path)\npath = list(path)\nxpath = []\nypath = []\nfor v in path:\n xpath.append(v[0])\n ypath.append(v[1])\n\nfig = plt.figure(figsize=[6,4])\nplt.plot(xpath, ypath, color=\"r\",linewidth=1 )\nplt.title(\"genuine signature\")\nfig.show()\n#we can see clearly, the plot is smooth\n\n\n#the plot of DTW distance for forgery signature\ndistance, path = fastdtw(ts_b, ts_c, dist=euclidean)\nprint(\"forgery signature distance: \", distance)\n# print(path)\npath = list(path)\nxpath = []\nypath = []\nfor v in path:\n xpath.append(v[0])\n ypath.append(v[1])\nfig = plt.figure(figsize=[6,4])\nplt.plot(xpath, ypath,color=\"r\",linewidth=1 )\nplt.title(\"forgery signature\")\nfig.show()\n#we can see clearly, the plot is not smooth\n", "genuine signature distance: 13655.445413425781\nforgery signature distance: 9582.046953734965\n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4a006058d8cfa06114087f9ba5703003dc40bad4
21,461
ipynb
Jupyter Notebook
Courses/Natural Language Processing Specialization/Natural Language Processing with Attention Models/Week3/C4_W3_SentencePiece_and_BPE.ipynb
piszewc/python-deep-learning-data-science
789b4796a41830040143aef073aa3f818d577b6c
[ "MIT", "Unlicense" ]
1
2020-11-21T17:06:08.000Z
2020-11-21T17:06:08.000Z
Courses/Natural Language Processing Specialization/Natural Language Processing with Attention Models/Week3/C4_W3_SentencePiece_and_BPE.ipynb
piszewc/python-deep-learning-data-science
789b4796a41830040143aef073aa3f818d577b6c
[ "MIT", "Unlicense" ]
null
null
null
Courses/Natural Language Processing Specialization/Natural Language Processing with Attention Models/Week3/C4_W3_SentencePiece_and_BPE.ipynb
piszewc/python-deep-learning-data-science
789b4796a41830040143aef073aa3f818d577b6c
[ "MIT", "Unlicense" ]
2
2020-06-06T15:40:34.000Z
2021-05-04T10:44:20.000Z
33.743711
777
0.606868
[ [ [ "# SentencePiece and BPE ", "_____no_output_____" ], [ "## Introduction to Tokenization", "_____no_output_____" ], [ "In order to process text in neural network models it is first required to **encode** text as numbers with ids, since the tensor operations act on numbers. Finally, if the output of the network is to be words, it is required to **decode** the predicted tokens ids back to text.\n\nTo encode text, the first decision that has to be made is to what level of graularity are we going to consider the text? Because ultimately, from these **tokens**, features are going to be created about them. Many different experiments have been carried out using *words*, *morphological units*, *phonemic units*, *characters*. For example, \n\n- Tokens are tricky. (raw text)\n- Tokens are tricky . ([words](https://arxiv.org/pdf/1301.3781))\n- Token s _ are _ trick _ y . ([morphemes](https://arxiv.org/pdf/1907.02423.pdf))\n- t oʊ k ə n z _ ɑː _ ˈt r ɪ k i. ([phonemes](https://www.aclweb.org/anthology/W18-5812.pdf), for STT)\n- T o k e n s _ a r e _ t r i c k y . ([character](https://www.aclweb.org/anthology/C18-1139/))", "_____no_output_____" ], [ "But how to identify these units, such as words, is largely determined by the language they come from. For example, in many European languages a space is used to separate words, while in some Asian languages there are no spaces between words. Compare English and Mandarin.\n\n- Tokens are tricky. (original sentence)\n- 标记很棘手 (Mandarin)\n- Biāojì hěn jíshǒu (pinyin)\n- 标记 很 棘手 (Mandarin with spaces)\n\n\nSo, the ability to **tokenize**, i.e. split text into meaningful fundamental units is not always straight-forward.\n\nAlso, there are practical issues of how large our *vocabulary* of words, `vocab_size`, should be, considering memory limitations vs. coverage. A compromise may be need to be made between: \n* the finest-grained models employing characters which can be memory intensive and \n* more computationally efficient *subword* units such as [n-grams](https://arxiv.org/pdf/1712.09405) or larger units.\n\nIn [SentencePiece](https://www.aclweb.org/anthology/D18-2012.pdf) unicode characters are grouped together using either a [unigram language model](https://www.aclweb.org/anthology/P18-1007.pdf) (used in this week's assignment) or [BPE](https://arxiv.org/pdf/1508.07909.pdf), **byte-pair encoding**. We will discuss BPE, since BERT and many of its variants use a modified version of BPE and its pseudocode is easy to implement and understand... hopefully!", "_____no_output_____" ], [ "## SentencePiece Preprocessing\n### NFKC Normalization", "_____no_output_____" ], [ "Unsurprisingly, even using unicode to initially tokenize text can be ambiguous, e.g., ", "_____no_output_____" ] ], [ [ "eaccent = '\\u00E9'\ne_accent = '\\u0065\\u0301'\nprint(f'{eaccent} = {e_accent} : {eaccent == e_accent}')", "_____no_output_____" ] ], [ [ "SentencePiece uses the Unicode standard normalization form, [NFKC](https://en.wikipedia.org/wiki/Unicode_equivalence), so this isn't an issue. 
Looking at our example from above but with normalization:", "_____no_output_____" ] ], [ [ "from unicodedata import normalize\n\nnorm_eaccent = normalize('NFKC', '\\u00E9')\nnorm_e_accent = normalize('NFKC', '\\u0065\\u0301')\nprint(f'{norm_eaccent} = {norm_e_accent} : {norm_eaccent == norm_e_accent}')", "_____no_output_____" ] ], [ [ "Normalization has actually changed the unicode code point (unicode unique id) for one of these two characters.", "_____no_output_____" ] ], [ [ "def get_hex_encoding(s):\n return ' '.join(hex(ord(c)) for c in s)\n\ndef print_string_and_encoding(s):\n print(f'{s} : {get_hex_encoding(s)}') ", "_____no_output_____" ], [ "for s in [eaccent, e_accent, norm_eaccent, norm_e_accent]:\n print_string_and_encoding(s)", "_____no_output_____" ] ], [ [ "This normalization has other side effects which may be considered useful such as converting curly quotes &ldquo; to \" their ASCII equivalent. (<sup>*</sup>Although we *now* lose directionality of the quote...)", "_____no_output_____" ], [ "### Lossless Tokenization<sup>*</sup>", "_____no_output_____" ], [ "SentencePiece also ensures that when you tokenize your data and detokenize your data the original position of white space is preserved. <sup>*</sup>However, tabs and newlines are converted to spaces, please try this experiment yourself later below.", "_____no_output_____" ], [ "To ensure this **lossless tokenization**, SentencePiece replaces white space with _ (U+2581). So that a simple join of the tokens by replace underscores with spaces can restore the white space, even if there are consecutive symbols. But remember first to normalize and then replace spaces with _ (U+2581). As the following example shows.", "_____no_output_____" ] ], [ [ "s = 'Tokenization is hard.'\ns_ = s.replace(' ', '\\u2581')\ns_n = normalize('NFKC', 'Tokenization is hard.')", "_____no_output_____" ], [ "print(get_hex_encoding(s))\nprint(get_hex_encoding(s_))\nprint(get_hex_encoding(s_n))", "_____no_output_____" ] ], [ [ "So the special unicode underscore was replaced by the ASCII unicode. Reversing the order of the second and third operations, we that the special unicode underscore was retained.", "_____no_output_____" ] ], [ [ "s = 'Tokenization is hard.'\nsn = normalize('NFKC', 'Tokenization is hard.')\nsn_ = s.replace(' ', '\\u2581')", "_____no_output_____" ], [ "print(get_hex_encoding(s))\nprint(get_hex_encoding(sn))\nprint(get_hex_encoding(sn_))", "_____no_output_____" ] ], [ [ "## BPE Algorithm\n\nNow that we have discussed the preprocessing that SentencePiece performs, we will go get our data, preprocess, and apply the BPE algorithm. 
We will show how this reproduces the tokenization produced by training SentencePiece on our example dataset (from this week's assignment).\n\n### Preparing our Data\nFirst, we get our Squad data and process as above.", "_____no_output_____" ] ], [ [ "import ast\n\ndef convert_json_examples_to_text(filepath):\n example_jsons = list(map(ast.literal_eval, open(filepath))) # Read in the json from the example file\n texts = [example_json['text'].decode('utf-8') for example_json in example_jsons] # Decode the byte sequences\n text = '\\n\\n'.join(texts) # Separate different articles by two newlines\n text = normalize('NFKC', text) # Normalize the text\n\n with open('example.txt', 'w') as fw:\n fw.write(text)\n \n return text", "_____no_output_____" ], [ "text = convert_json_examples_to_text('./data/data.txt')\nprint(text[:900])", "_____no_output_____" ] ], [ [ "In the algorithm the `vocab` variable is actually a frequency dictionary of the words. Further, those words have been prepended with an *underscore* to indicate that they are the beginning of a word. Finally, the characters have been delimited by spaces so that the BPE algorithm can group the most common characters together in the dictionary in a greedy fashion. We will see how that is done shortly.", "_____no_output_____" ] ], [ [ "from collections import Counter\n\nvocab = Counter(['\\u2581' + word for word in text.split()])\nvocab = {' '.join([l for l in word]): freq for word, freq in vocab.items()}", "_____no_output_____" ], [ "def show_vocab(vocab, end='\\n', limit=20):\n \"\"\"Show word frequencys in vocab up to the limit number of words\"\"\"\n shown = 0\n for word, freq in vocab.items():\n print(f'{word}: {freq}', end=end)\n shown +=1\n if shown > limit:\n break", "_____no_output_____" ], [ "show_vocab(vocab)", "_____no_output_____" ] ], [ [ "We check the size of the vocabulary (frequency dictionary) because this is the one hyperparameter that BPE depends on crucially on how far it breaks up a word into SentencePieces. It turns out that for our trained model on our small dataset that 60% of 455 merges of the most frequent characters need to be done to reproduce the upperlimit of a 32K `vocab_size` over the entire corpus of examples.", "_____no_output_____" ] ], [ [ "print(f'Total number of unique words: {len(vocab)}')\nprint(f'Number of merges required to reproduce SentencePiece training on the whole corpus: {int(0.60*len(vocab))}')", "_____no_output_____" ] ], [ [ "### BPE Algorithm\nDirectly from the BPE paper we have the following algorithm. ", "_____no_output_____" ] ], [ [ "import re, collections\n\ndef get_stats(vocab):\n pairs = collections.defaultdict(int)\n for word, freq in vocab.items():\n symbols = word.split()\n for i in range(len(symbols) - 1):\n pairs[symbols[i], symbols[i+1]] += freq\n return pairs\n\ndef merge_vocab(pair, v_in):\n v_out = {}\n bigram = re.escape(' '.join(pair))\n p = re.compile(r'(?<!\\S)' + bigram + r'(?!\\S)')\n for word in v_in:\n w_out = p.sub(''.join(pair), word)\n v_out[w_out] = v_in[word]\n return v_out\n\ndef get_sentence_piece_vocab(vocab, frac_merges=0.60):\n sp_vocab = vocab.copy()\n num_merges = int(len(sp_vocab)*frac_merges)\n \n for i in range(num_merges):\n pairs = get_stats(sp_vocab)\n best = max(pairs, key=pairs.get)\n sp_vocab = merge_vocab(best, sp_vocab)\n\n return sp_vocab", "_____no_output_____" ] ], [ [ "To understand what's going on first take a look at the third function `get_sentence_piece_vocab`. 
It takes in the current `vocab` word-frequency dictionary and the fraction, `frac_merges`, of the total `vocab_size` to merge characters in the words of the dictionary, `num_merges` times. Then for each *merge* operation it `get_stats` on how many of each pair of character sequences there are. It gets the most frequent *pair* of symbols as the `best` pair. Then it merges that pair of symbols (removes the space between them) in each word in the `vocab` that contains this `best` (= `pair`). Consequently, `merge_vocab` creates a new `vocab`, `v_out`. This process is repeated `num_merges` times and the result is the set of SentencePieces (keys of the final `sp_vocab`).", "_____no_output_____" ], [ "### Additional Discussion of BPE Algorithm", "_____no_output_____" ], [ "Please feel free to skip the below if the above description was enough.\n\nIn a little more detail then, we can see in `get_stats` we initially create a list of bigram (two character sequence) frequencies from our vocabulary. Later, this may include trigrams, quadgrams, etc. Note that the key of the `pairs` frequency dictionary is actually a 2-tuple, which is just shorthand notation for a pair.\n\nIn `merge_vocab` we take in an individual `pair` (of character sequences, note this is the most frequency `best` pair) and the current `vocab` as `v_in`. We create a new `vocab`, `v_out`, from the old by joining together the characters in the pair (removing the space), if they are present in a word of the dictionary.\n\n[Warning](https://regex101.com/): the expression `(?<!\\S)` means that either a whitespace character follows before the `bigram` or there is nothing before the bigram (it is the beginning of the word), similarly for `(?!\\S)` for preceding whitespace or the end of the word. ", "_____no_output_____" ] ], [ [ "sp_vocab = get_sentence_piece_vocab(vocab)\nshow_vocab(sp_vocab) ", "_____no_output_____" ] ], [ [ "## Train SentencePiece BPE Tokenizer on Example Data\n### Explore SentencePiece Model\nFirst let us explore the SentencePiece model provided with this week's assignment. Remember you can always use Python's built in `help` command to see the documentation for any object or method.", "_____no_output_____" ] ], [ [ "import sentencepiece as spm\nsp = spm.SentencePieceProcessor(model_file='./data/sentencepiece.model')", "_____no_output_____" ], [ "# help(sp)", "_____no_output_____" ] ], [ [ "Let's work with the first sentence of our example text.", "_____no_output_____" ] ], [ [ "s0 = 'Beginners BBQ Class Taking Place in Missoula!'", "_____no_output_____" ], [ "# encode: text => id\nprint(sp.encode_as_pieces(s0))\nprint(sp.encode_as_ids(s0))\n\n# decode: id => text\nprint(sp.decode_pieces(sp.encode_as_pieces(s0)))\nprint(sp.decode_ids([12847, 277]))", "_____no_output_____" ] ], [ [ "Notice how SentencePiece breaks the words into seemingly odd parts, but we've seen something similar from our work with BPE. But how close were we to this model trained on the whole corpus of examples with a `vocab_size` of 32,000 instead of 455? Here you can also test what happens to white space, like '\\n'. 
\n\nBut first let us note that SentencePiece encodes the SentencePieces, the tokens, and has reserved some of the ids as can be seen in this week's assignment.", "_____no_output_____" ] ], [ [ "uid = 15068\nspiece = \"\\u2581BBQ\"\nunknown = \"__MUST_BE_UNKNOWN__\"\n\n# id <=> piece conversion\nprint(f'SentencePiece for ID {uid}: {sp.id_to_piece(uid)}')\nprint(f'ID for Sentence Piece {spiece}: {sp.piece_to_id(spiece)}')\n\n# returns 0 for unknown tokens (we can change the id for UNK)\nprint(f'ID for unknown text {unknown}: {sp.piece_to_id(unknown)}')", "_____no_output_____" ], [ "print(f'Beginning of sentence id: {sp.bos_id()}')\nprint(f'Pad id: {sp.pad_id()}')\nprint(f'End of sentence id: {sp.eos_id()}')\nprint(f'Unknown id: {sp.unk_id()}')\nprint(f'Vocab size: {sp.vocab_size()}')", "_____no_output_____" ] ], [ [ "We can also check what are the ids for the first part and last part of the vocabulary.", "_____no_output_____" ] ], [ [ "print('\\nId\\tSentP\\tControl?')\nprint('------------------------')\n# <unk>, <s>, </s> are defined by default. Their ids are (0, 1, 2)\n# <s> and </s> are defined as 'control' symbol.\nfor uid in range(10):\n print(uid, sp.id_to_piece(uid), sp.is_control(uid), sep='\\t')\n \n# for uid in range(sp.vocab_size()-10,sp.vocab_size()):\n# print(uid, sp.id_to_piece(uid), sp.is_control(uid), sep='\\t')", "_____no_output_____" ] ], [ [ "### Train SentencePiece BPE model with our example.txt", "_____no_output_____" ], [ "Finally, let's train our own BPE model directly from the SentencePiece library and compare it to the results of our implemention of the algorithm from the BPE paper itself.", "_____no_output_____" ] ], [ [ "spm.SentencePieceTrainer.train('--input=example.txt --model_prefix=example_bpe --vocab_size=450 --model_type=bpe')\nsp_bpe = spm.SentencePieceProcessor()\nsp_bpe.load('example_bpe.model')\n\nprint('*** BPE ***')\nprint(sp_bpe.encode_as_pieces(s0))", "_____no_output_____" ], [ "show_vocab(sp_vocab, end = ', ')", "_____no_output_____" ] ], [ [ "Our implementation of BPE's code from the paper matches up pretty well with the library itself! The differences are probably accounted for by the `vocab_size`. There is also another technical difference in that in the SentencePiece implementation of BPE a priority queue is used to more efficiently keep track of the *best pairs*. Actually, there is a priority queue in the Python standard library called `heapq` if you would like to give that a try below! ", "_____no_output_____" ], [ "## Optionally try to implement BPE using a priority queue below", "_____no_output_____" ] ], [ [ "from heapq import heappush, heappop", "_____no_output_____" ], [ "def heapsort(iterable):\n h = []\n for value in iterable:\n heappush(h, value)\n return [heappop(h) for i in range(len(h))]", "_____no_output_____" ], [ "a = [1,4,3,1,3,2,1,4,2]\nheapsort(a)", "_____no_output_____" ] ], [ [ "For a more extensive example consider looking at the [SentencePiece repo](https://github.com/google/sentencepiece/blob/master/python/sentencepiece_python_module_example.ipynb). The last few sections of this code was repurposed from that tutorial. Thanks for your participation! Next stop BERT and T5!", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ] ]
4a0060a109fa8675f8b33aef28c1ad04a1dcc9c9
18,613
ipynb
Jupyter Notebook
assessment/spm_sr15_figure_3b_illustrative_pathways.ipynb
ktokarska/ipcc_sr15_scenario_analysis
390a1d9a1dde4ac667d946cb6167ce814099dd40
[ "Apache-2.0" ]
52
2018-10-15T07:28:15.000Z
2022-03-11T16:51:48.000Z
assessment/spm_sr15_figure_3b_illustrative_pathways.ipynb
ktokarska/ipcc_sr15_scenario_analysis
390a1d9a1dde4ac667d946cb6167ce814099dd40
[ "Apache-2.0" ]
32
2018-11-04T19:54:09.000Z
2021-12-20T16:23:27.000Z
assessment/spm_sr15_figure_3b_illustrative_pathways.ipynb
ktokarska/ipcc_sr15_scenario_analysis
390a1d9a1dde4ac667d946cb6167ce814099dd40
[ "Apache-2.0" ]
28
2018-10-15T07:28:41.000Z
2022-03-11T16:51:37.000Z
26.514245
271
0.554881
[ [ [ "### *IPCC SR15 scenario assessment*\n\n<img style=\"float: right; height: 80px; padding-left: 20px;\" src=\"../_static/IIASA_logo.png\">\n<img style=\"float: right; height: 80px;\" src=\"../_static/IAMC_logo.jpg\">\n\n# Characteristics of four illustrative model pathways\n## Figure 3b of the *Summary for Policymakers*\n\nThis notebook derives the figure panels and indicators for the table in Figure 3b in the Summary for Policymakers\nof the IPCC's _\"Special Report on Global Warming of 1.5°C\"_.\n\nThe scenario data used in this analysis can be accessed and downloaded at [https://data.ene.iiasa.ac.at/iamc-1.5c-explorer](https://data.ene.iiasa.ac.at/iamc-1.5c-explorer).", "_____no_output_____" ], [ "## Load `pyam` package and other dependencies", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport numpy as np\nimport io\nimport itertools\nimport yaml\nimport math\nimport matplotlib.pyplot as plt\nplt.style.use('style_sr15.mplstyle')\n%matplotlib inline\nimport pyam", "_____no_output_____" ] ], [ [ "## Import scenario data, categorization and specifications files\n\nThe metadata file with scenario categorisation and quantitative indicators can be downloaded at [https://data.ene.iiasa.ac.at/iamc-1.5c-explorer](https://data.ene.iiasa.ac.at/iamc-1.5c-explorer). \nAlternatively, it can be re-created using the notebook `sr15_2.0_categories_indicators`.\n\nThe last cell of this section loads and assigns a number of auxiliary lists as defined in the categorization notebook.", "_____no_output_____" ] ], [ [ "sr1p5 = pyam.IamDataFrame(data='../data/iamc15_scenario_data_world_r2.0.xlsx')", "_____no_output_____" ], [ "sr1p5.load_meta('sr15_metadata_indicators.xlsx')", "_____no_output_____" ], [ "with open(\"sr15_specs.yaml\", 'r') as stream:\n specs = yaml.load(stream, Loader=yaml.FullLoader)\n\nrc = pyam.run_control()\nfor item in specs.pop('run_control').items():\n rc.update({item[0]: item[1]})\ncats_15 = specs.pop('cats_15')\ncats_15_no_lo = specs.pop('cats_15_no_lo')\nmarker = specs.pop('marker')", "_____no_output_____" ] ], [ [ "## Downselect scenario ensemble to categories of interest for this assessment", "_____no_output_____" ] ], [ [ "sr1p5.meta.rename(columns={'Kyoto-GHG|2010 (SAR)': 'kyoto_ghg_2010'}, inplace=True)", "_____no_output_____" ], [ "df = sr1p5.filter(category=cats_15)", "_____no_output_____" ] ], [ [ "## Global carbon dioxide emissions in four illustrative pathways\n\nFigure SPM3b shows the contribution to CO2 emissions and removal by three categories in the four illustrative pathways.\n\nThis illustration does not use the emissions timeseries as reported by the models. This is because the variable `Emissions|CO2|Energy and Industrial Processes` represents net emissions, incorporating carbon dioxide removal in this sector.\n\nThe steps below compute the gross emissions. The long variable names are mapped to short variables for easier readibility.", "_____no_output_____" ] ], [ [ "afolu_var = 'Emissions|CO2|AFOLU'\nene_ind_var = 'Emissions|CO2|Energy and Industrial Processes'\nbeccs_var ='Carbon Sequestration|CCS|Biomass'", "_____no_output_____" ] ], [ [ "We downselect the entire data to the four illustrative pathways (`marker` scenarios) and the three variables of interest. 
For consistency with the figure in the SPM, the units are converted to Gt CO2.", "_____no_output_____" ] ], [ [ "pw = df.filter(marker=marker, variable=[afolu_var, ene_ind_var, beccs_var],\n year=range(2010, 2101, 10))", "_____no_output_____" ], [ "pw.convert_unit('Mt CO2/yr', 'Gt CO2/yr', inplace=True)", "_____no_output_____" ] ], [ [ "As a first step, we extract the timeseries for the AFOLU emissions and rename the variable for brevity. This data will be used as is in this figure.", "_____no_output_____" ] ], [ [ "afolu = (\n pw.filter(variable=afolu_var)\n .rename(variable={afolu_var: 'AFOLU'})\n)", "_____no_output_____" ] ], [ [ "The energy-and-industry and BECCS timeseries data needs some processing. It is first separated into two distinct dataframes, and the BECCS variable is renamed for brevity.", "_____no_output_____" ] ], [ [ "ene_ind = pw.filter(variable=ene_ind_var)", "_____no_output_____" ], [ "beccs = (\n pw.filter(variable=beccs_var)\n .rename(variable={beccs_var: 'BECCS'})\n)", "_____no_output_____" ] ], [ [ "The variable `Carbon Sequestration|CCS|Biomass` reports removed carbon dioxide as positive values. For use in this figure, the sign needs to be reversed.", "_____no_output_____" ] ], [ [ "beccs.data.value = - beccs.data.value", "_____no_output_____" ] ], [ [ "The `LED` marker scenario does not use any BECCS by assumption of the scenario design. For this reason, the variable `Carbon Sequestration|CCS|Biomass` was not defined when the MESSAGE team submitted the scenario results to the IAMC 1.5°C Scenario Data ensemble.\n\nFor easier computation, we add this data series manually here.", "_____no_output_____" ] ], [ [ "years = beccs.timeseries().columns", "_____no_output_____" ], [ "beccs.append(\n pyam.IamDataFrame(\n pd.DataFrame([0] * len(years), index=years).T,\n model='MESSAGEix-GLOBIOM 1.0', scenario='LowEnergyDemand',\n region='World', variable='BECCS', unit='Gt CO2/yr'),\n inplace=True\n)", "_____no_output_____" ] ], [ [ "As a third step, we compute the difference between net CO2 emissions from the energy sector & industry and BECCS to obtain gross CO2 emissions in that sector.", "_____no_output_____" ] ], [ [ "def get_value(df):\n cols = ['model', 'scenario', 'region', 'year', 'unit']\n return df.data.set_index(cols)['value']", "_____no_output_____" ], [ "diff = get_value(ene_ind) - get_value(beccs)", "_____no_output_____" ], [ "ene_ind_gross = pyam.IamDataFrame(diff, variable='Fossil fuel and industry')", "_____no_output_____" ] ], [ [ "We now combine the three contribution dataframes into one joint dataframe for plotting. 
Because the `beccs` IamDataFrame was partially altered, concatenating directly causes an issue, so we remove all `meta` columns from that dataframe beforehand.", "_____no_output_____" ] ], [ [ "beccs.meta = beccs.meta.drop(columns=beccs.meta.columns)", "_____no_output_____" ], [ "co2 = pyam.concat([ene_ind_gross, afolu, beccs])", "_____no_output_____" ] ], [ [ "We now proceed to plot the four illustrative pathways.", "_____no_output_____" ] ], [ [ "fig, ax = plt.subplots(1, 4, figsize=(14, 4), sharey=True)\n\nfor i, m in enumerate(['LED', 'S1', 'S2', 'S5']):\n co2.filter(marker=m).stack_plot(ax=ax[i], total=True, legend=False)\n ax[i].title.set_text(m)\n\nax[3].legend(loc=1)", "_____no_output_____" ] ], [ [ "## Collecting indicators across illustrative pathways\n\n### Initialize a `pyam.Statistics` instance", "_____no_output_____" ] ], [ [ "base_year = 2010\ncompare_years = [2030, 2050]\nyears = [base_year] + compare_years", "_____no_output_____" ], [ "stats = pyam.Statistics(df=df, groupby={'marker': ['LED', 'S1', 'S2', 'S5']},\n filters=[(('pathways', 'no & lo os 1.5'), {'category': cats_15_no_lo})])", "_____no_output_____" ] ], [ [ "### CO2 and Kyoto GHG emissions reductions", "_____no_output_____" ] ], [ [ "co2 = (\n df.filter(kyoto_ghg_2010='in range', variable='Emissions|CO2', year=years)\n .convert_unit('Mt CO2/yr', 'Gt CO2/yr')\n .timeseries()\n)", "_____no_output_____" ], [ "for y in compare_years:\n stats.add((co2[y] / co2[2010] - 1) * 100,\n 'CO2 emission reduction (% relative to 2010)',\n subheader=y)", "_____no_output_____" ], [ "kyoto_ghg = (\n df.filter(kyoto_ghg_2010='in range', variable='Emissions|Kyoto Gases (SAR-GWP100)', year=years)\n .rename(unit={'Mt CO2-equiv/yr': 'Mt CO2e/yr'})\n .convert_unit('Mt CO2e/yr','Gt CO2e/yr')\n .timeseries()\n)\nfor y in compare_years:\n stats.add((kyoto_ghg[y] / kyoto_ghg[base_year] - 1) * 100,\n 'Kyoto-GHG emission reduction (SAR-GWP100), % relative to {})'.format(base_year),\n subheader=y)", "_____no_output_____" ] ], [ [ "### Final energy demand reduction relative to 2010", "_____no_output_____" ] ], [ [ "fe = df.filter(variable='Final Energy', year=years).timeseries()\nfor y in compare_years:\n stats.add((fe[y] / fe[base_year] - 1) * 100,\n 'Final energy demand reduction relative to {} (%)'.format(base_year),\n subheader=y)", "_____no_output_____" ] ], [ [ "### Share of renewables in electricity generation", "_____no_output_____" ] ], [ [ "def add_stats_share(stats, var_list, name, total, total_name, years, df=df):\n\n _df = df.filter(variable=var_list)\n for v in var_list:\n _df.require_variable(v, exclude_on_fail=True)\n _df.filter(exclude=False, inplace=True)\n\n component = (\n _df.timeseries()\n .groupby(['model', 'scenario']).sum()\n )\n share = component / total * 100\n \n for y in years:\n stats.add(share[y], header='Share of {} in {} (%)'.format(name, total_name),\n subheader=y)", "_____no_output_____" ], [ "ele = df.filter(variable='Secondary Energy|Electricity', year=compare_years).timeseries()\nele.index = ele.index.droplevel([2, 3, 4])", "_____no_output_____" ], [ "ele_re_vars = [\n 'Secondary Energy|Electricity|Biomass',\n 'Secondary Energy|Electricity|Non-Biomass Renewables'\n]\nadd_stats_share(stats, ele_re_vars, 'renewables', ele, 'electricity', compare_years)", "_____no_output_____" ] ], [ [ "### Changes in primary energy mix", "_____no_output_____" ] ], [ [ "mapping = [\n ('coal', 'Coal'),\n ('oil', 'Oil'),\n ('gas', 'Gas'),\n ('nuclear', 'Nuclear'),\n ('bioenergy', 'Biomass'),\n ('non-biomass renewables', 
'Non-Biomass Renewables')\n]", "_____no_output_____" ], [ "for (n, v) in mapping:\n data = df.filter(variable='Primary Energy|{}'.format(v), year=years).timeseries()\n\n for y in compare_years:\n stats.add((data[y] / data[base_year] - 1) * 100,\n header='Primary energy from {} (% rel to {})'.format(n, base_year),\n subheader=y)", "_____no_output_____" ] ], [ [ "### Cumulative carbon capture and sequestration until the end of the century", "_____no_output_____" ] ], [ [ "def cumulative_ccs(variable, name, first_year=2016, last_year=2100):\n\n data = (\n df.filter(variable=variable)\n .convert_unit('Mt CO2/yr', 'Gt CO2/yr')\n .timeseries()\n )\n \n stats.add(\n data.apply(pyam.cumulative, raw=False, axis=1,\n first_year=first_year, last_year=last_year),\n header='Cumulative {} until {} (GtCO2)'.format(name, last_year), subheader='')", "_____no_output_____" ], [ "cumulative_ccs('Carbon Sequestration|CCS', 'CCS')", "_____no_output_____" ], [ "cumulative_ccs('Carbon Sequestration|CCS|Biomass', 'BECCS')", "_____no_output_____" ] ], [ [ "### Land cover for energy crops\n\nConvert unit to SI unit (million square kilometers).", "_____no_output_____" ] ], [ [ "energy_crops = (\n df.filter(variable='Land Cover|Cropland|Energy Crops', year=2050)\n .convert_unit('million ha', 'million km2', factor=0.01)\n .timeseries()\n)", "_____no_output_____" ], [ "stats.add(energy_crops[2050], header='Land are for energy crops (million km2)')", "_____no_output_____" ] ], [ [ "### Emissions from land use", "_____no_output_____" ] ], [ [ "species = ['CH4', 'N2O']", "_____no_output_____" ], [ "for n in species:\n data = df.filter(kyoto_ghg_2010='in range', variable='Emissions|{}|AFOLU'.format(n), year=years).timeseries()\n\n for y in compare_years:\n stats.add((data[y] / data[base_year] - 1) * 100,\n header='Agricultural {} emissions (% rel to {})'.format(n, base_year),\n subheader=y)", "_____no_output_____" ] ], [ [ "## Display summary statistics and export to `xlsx`", "_____no_output_____" ] ], [ [ "summary = stats.summarize(interquartile=True, custom_format='{:.0f}').T\nsummary", "_____no_output_____" ], [ "summary.to_excel('output/spm_sr15_figure3b_indicators_table.xlsx')", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
4a006818f34fd4b8cd7a3eb74546c7928a0b38ec
50,575
ipynb
Jupyter Notebook
Crop yield Analysis .ipynb
Akki1295/crop-yield-prediction
d4154fcab0ed706ae156a28080c4974d3746f379
[ "Apache-2.0" ]
null
null
null
Crop yield Analysis .ipynb
Akki1295/crop-yield-prediction
d4154fcab0ed706ae156a28080c4974d3746f379
[ "Apache-2.0" ]
null
null
null
Crop yield Analysis .ipynb
Akki1295/crop-yield-prediction
d4154fcab0ed706ae156a28080c4974d3746f379
[ "Apache-2.0" ]
null
null
null
93.657407
35,740
0.80261
[ [ [ "<h1 align=center><font size = 6> Crop Yield Prediction. </font></h1>", "_____no_output_____" ], [ "## import required libraries. ", "_____no_output_____" ] ], [ [ "import numpy as np #Library to handle data in vectorized manner.\nimport pandas as pd #library for data analysis.\n\n#Plotting libray matplotlib and associated ploting modules.\nimport matplotlib.pyplot as plt\nimport matplotlib.cm as cm\nimport matplotlib.colors as colors\n\n\n%matplotlib inline\nprint('All the required libraries are imported.....!')", "All the required libraries are imported.....!\n" ] ], [ [ "## Initial data gathering and cleaning.", "_____no_output_____" ], [ "### read crop production dataset into pandas dataframe.", "_____no_output_____" ] ], [ [ "\ndf_crop = pd.read_csv(\"crop_production.csv\")\n\n# Now look at the data frame. \ndf_crop.head()", "_____no_output_____" ] ], [ [ "#### replace blank cells with nan and remove these nan values.", "_____no_output_____" ] ], [ [ "nan_value = float(\"NaN\")\ndf_crop.replace(\"\", nan_value, inplace=True)\ndf_crop.dropna(subset = [\"District_name\"], inplace=True)\nprint(\"Drop Sucessfull\")\n#now look at the shape of our data frame.\nprint(\"Size of the data frame is :\", df_crop.shape)", "Drop Sucessfull\nSize of the data frame is : (33, 11)\n" ] ], [ [ "\n### Crop production data analysis.", "_____no_output_____" ] ], [ [ "# assign data in new dataframe for analysis.\ndf_analysis = df_crop\n", "_____no_output_____" ], [ "#Set the district name as index - useful for quickly looking up district using .loc method\ndf_analysis.set_index('District_name' , inplace = True)\n\n#now see how data frame looks.\ndf_analysis.head()", "_____no_output_____" ], [ "length = len(df_analysis)\nlength", "_____no_output_____" ] ], [ [ "### Lets analyze the crop production data of each district where district name choosed by user.", "_____no_output_____" ], [ "Please Enter the district name in which you want to analyze crop data in the input field, use the district name from following list.<br>\n[Ahemadnagar Akola Amravati Aurangabad Beed Bhandara Buldhana Chandrapur Dhule Gadchiroli Gondia Hingoli Jalgaon Jalna Kolhapur Latur Nagpur Nanded Nandurbar Nashik Osmanabad Parbhani Pune Raigad Ratnagiri Sangli Satara Sindhudurg Solapur Thane Wardha Washim Yavatmal\n]<br>\n\nNote : Input field is case sensitive.", "_____no_output_____" ] ], [ [ "value = input(\"Enter the District Name :\")\n\ndf_data = df_analysis.loc[[value]] #assign the data of district which is selected by user input to the new dataframe.\n\ndf_data.reset_index(drop=True, inplace=True) # drop the index.\n#transpose dataframe.\ndf_transposed = df_data.T\n\n#rename the column name.\ndf_transposed.rename(columns={ 0: 'Production'}, inplace=True)\n\n#now lets plot the dtataframe into bar graph.\ndf_transposed.plot(kind=\"bar\", figsize = (16,14), fontsize = 12, color = 'rgbkymc') \nplt.title(\"Distrcit Wise Crop Production In Maharashtra State\", fontsize = 16)\nplt.xlabel(\"Different Crops of District :\" + value, fontsize = 16)\nplt.ylabel(\"Crop Production in Tonns\", fontsize = 16)\n\nplt.savefig('yieldanalysis.png', format= 'png')\nplt.show()\n\n\n", "Enter the District Name : Satara\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ] ]
4a0073e537cb391da51f7af51c2b928ce464282e
73,886
ipynb
Jupyter Notebook
examples/perturbation-analysis.ipynb
aykol/CoordinationNumbers
050c79e0f52e4e885b0c1b56a35a256282f91ebf
[ "BSD-3-Clause-LBNL" ]
12
2019-10-18T18:16:06.000Z
2021-03-22T01:27:07.000Z
examples/perturbation-analysis.ipynb
aykol/CoordinationNumbers
050c79e0f52e4e885b0c1b56a35a256282f91ebf
[ "BSD-3-Clause-LBNL" ]
40
2019-10-18T23:13:00.000Z
2022-03-28T09:04:53.000Z
examples/perturbation-analysis.ipynb
hackingmaterials/MaterialsCoord
050c79e0f52e4e885b0c1b56a35a256282f91ebf
[ "BSD-3-Clause-LBNL" ]
3
2018-06-21T00:42:34.000Z
2019-09-30T20:32:33.000Z
290.889764
65,716
0.92414
[ [ [ "# MaterialsCoord benchmarking – sensitivity to perturbation analysis\n\nThis notebook demonstrates how to use MaterialsCoord to benchmark the sensitivity of bonding algorithms to structural perturbations. Perturbations are introduced according the Einstein crystal test rig, in which site is perturbed so that the distribution around the equilibrium position yields a normal distribution for each Cartesian component.\n\nThe perturbation complies thus with the expectation for an Einstein crystal,\nin which the potential is given by $V(\\delta r) = 0.5 k_\\mathrm{spring} \\delta r^2$, where\n$k_\\mathrm{spring}$ denotes the spring constant with which the sites are tethered to\ntheir equilibrium position, and $\\delta r$ is the distance of the site under\nconsideration from its equilibrium position.\n\nThe MaterialsCoord `Benchmark` class accepts a `perturb_sigma` option, which is equal to $(k_\\mathrm{B}T/k_\\mathrm{spring})^{0.5}$.\n\n*Written using:*\n- MaterialsCoord==0.1.0\n\n*Authors: Hillary Pan, Alex Ganose (10/12/19)*\n\n---\n\nFirst, lets initialize the near neighbor methods we are interested in.", "_____no_output_____" ] ], [ [ "from pymatgen.analysis.local_env import BrunnerNN_reciprocal, EconNN, JmolNN, \\\n MinimumDistanceNN, MinimumOKeeffeNN, MinimumVIRENN, \\\n VoronoiNN, CrystalNN\n\nnn_methods = [\n BrunnerNN_reciprocal(), EconNN(tol=0.5), JmolNN(), CrystalNN(), VoronoiNN(tol=0.5),\n MinimumDistanceNN(), MinimumOKeeffeNN(), MinimumVIRENN()\n]", "_____no_output_____" ] ], [ [ "Next, import the benchmark and choose which structures we are interested in.", "_____no_output_____" ] ], [ [ "from materialscoord.core import Benchmark\n\nstructure_groups = [\"common_binaries\", \"elemental\", \"A2BX4\", \"ABX3\", \"ABX4\"]", "_____no_output_____" ] ], [ [ "Choose the initial and final perturbation sigma values to include, as well as the number of steps inbetween.", "_____no_output_____" ] ], [ [ "import numpy as np\n\ninitial_sigma = 0\nfinal_sigma = 0.2\nnsteps = 51\n\nsigmas = np.linspace(initial_sigma, final_sigma, nsteps)", "_____no_output_____" ] ], [ [ "Run the benchmark with the perturbation turned on. Note we have disabled symmetry so that each perturbed site is treated separately. 
Due to the absence of symmetry and the slow speed of `MinimumVIRENN`, this can take a long time (14 hours on a 2017 MacBook Pro).", "_____no_output_____" ] ], [ [ "from tqdm import tqdm_notebook\n\nresults = []\nfor sigma in tqdm_notebook(sigmas):\n bm = Benchmark.from_structure_group(structure_groups, perturb_sigma=sigma, symprec=None)\n sigma_scores = bm.score(nn_methods)\n results.append(sigma_scores.iloc[-1].values)", "<ipython-input-4-9e2057c5afdc>:4: TqdmDeprecationWarning: This function will be removed in tqdm==5.0.0\nPlease use `tqdm.notebook.tqdm` instead of `tqdm.tqdm_notebook`\n for sigma in tqdm_notebook(sigmas):\n" ] ], [ [ "Finally, plot the results.", "_____no_output_____" ] ], [ [ "%matplotlib inline\nimport matplotlib.pyplot as plt\nfrom matplotlib import ticker\nimport os\nfrom scipy.signal import savgol_filter\nimport seaborn as sns\n\n\nplt_results = np.array(results).T\n\n# define matplotlib style settings\nstyle = {\n \"font.sans-serif\": [\"Helvetica\", \"Arial\"], \"axes.labelsize\": 16,\n \"xtick.labelsize\": 16, \"ytick.labelsize\": 16, \"xtick.direction\": \"in\",\n \"ytick.direction\": \"in\", \"xtick.major.size\": 8, \"xtick.minor.size\": 4,\n \"ytick.major.size\": 8, \"ytick.minor.size\": 4, \"lines.linewidth\": 2.5,\n \"lines.markersize\": 10, \"axes.linewidth\": 1.2, \"xtick.major.width\": 1.2,\n \"xtick.minor.width\": 1.2, \"ytick.major.width\": 1.2, \"ytick.minor.width\": 1.2,\n \"pdf.fonttype\":42\n}\n\nnn_method_mapping = {\"BrunnerNN_reciprocal\": \"BrunnerNN\"}\ncolors = sns.color_palette(\"deep\")\norder = [5, 6, 7, 2, 1, 0, 4, 3]\n\nplt.style.use(style)\nfig = plt.figure(figsize=(6, 6))\nax = plt.gca()\n\nfor i, x in enumerate(order):\n method = nn_methods[x]\n y_vals = plt_results[x]\n \n name = method.__class__.__name__\n c = colors[i]\n name = nn_method_mapping.get(name, name)\n \n # smooth the lines with a double pass through a savgol filter\n # more ideal would be to take averages accross multiple runs\n # but due to the time taken to generate the data this is impractical\n y_vals = savgol_filter(y_vals, 27, 2)\n y_vals = savgol_filter(y_vals, 27, 2)\n\n ax.plot(sigmas, y_vals, label=name, c=c)\n\nax.set(ylabel=\"Benchmark score\", xlabel=\"Sigma (Å)\")\nax.set_xlim((0, 0.2))\n\nax.yaxis.set_major_locator(ticker.MaxNLocator(5))\n\nplt.legend(loc='upper left', bbox_to_anchor=(1, 1), frameon=False, fontsize=15)\nplt.savefig(os.path.join(\"plots\", \"perturbation-tolerance.pdf\"), bbox_inches=\"tight\")\nplt.show()", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
4a009d329a1888776a13cc411e4660badb42fe88
21,229
ipynb
Jupyter Notebook
hw4/Testing.ipynb
daggertye/CS294_homework
4905e2622e1c7e4d2bde343da139333b3dbecc93
[ "MIT" ]
41
2018-06-27T10:07:54.000Z
2020-12-08T14:20:11.000Z
hw4/Testing.ipynb
louaaron/CS294_homework
4905e2622e1c7e4d2bde343da139333b3dbecc93
[ "MIT" ]
2
2018-06-27T14:24:11.000Z
2019-09-05T05:19:10.000Z
hw4/Testing.ipynb
daggertye/CS294_homework
4905e2622e1c7e4d2bde343da139333b3dbecc93
[ "MIT" ]
18
2018-09-01T04:39:39.000Z
2020-11-16T21:06:22.000Z
47.598655
9,790
0.740449
[ [ [ "import numpy as np\nimport tensorflow as tf\nimport gym\nfrom cheetah_env import HalfCheetahEnvNew", "/anaconda3/lib/python3.6/importlib/_bootstrap.py:219: RuntimeWarning: compiletime version 3.5 of module 'tensorflow.python.framework.fast_tensor_util' does not match runtime version 3.6\n return f(*args, **kwds)\n" ], [ "env = gym.make('HalfCheetah-v2')", "[2018-06-25 08:39:43,396] Making new env: HalfCheetah-v2\n" ], [ "actions = env.action_space", "_____no_output_____" ], [ "actions.shape", "_____no_output_____" ], [ "obs = env.observation_space", "_____no_output_____" ], [ "obs.shape", "_____no_output_____" ], [ "actions.sample()", "_____no_output_____" ], [ "x = np.array([[0,0,0], [1,1,1]])", "_____no_output_____" ], [ "x[0][0] = 1", "_____no_output_____" ], [ "x[0]", "_____no_output_____" ], [ "action = [env.action_space.sample() for _ in range(10)]", "_____no_output_____" ], [ "np.array(action).shape", "_____no_output_____" ], [ "arr1 = [1, 0, 0]\narr2 = [arr1 for _ in range(5)]", "_____no_output_____" ], [ "arr2[0][0] = 0", "_____no_output_____" ], [ "arr2[1][0]", "_____no_output_____" ], [ "np.concatenate([[[0,1,2,3],[0,1]] for _ in range(10)])", "_____no_output_____" ], [ "import matplotlib.pyplot as plt", "_____no_output_____" ], [ "plt.scatter([1,2,3], [2,3,4])\nplt.title(\"State vs action\")\nplt.show()", "_____no_output_____" ], [ "plt.clf()", "_____no_output_____" ], [ "plt.scatter(np.array([2,3,4]), [1,2,3])", "_____no_output_____" ], [ "arr = np.array([[0,0,0],[0,0,0]])\narr[:, 2]", "_____no_output_____" ], [ "from gym.envs.mujoco.half_cheetah import HalfCheetahEnv", "_____no_output_____" ], [ "newenv = HalfCheetahEnv()", "_____no_output_____" ], [ "newenv.observation_space.shape", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
4a00bd907c08b9e84eaf9018ff5a0b404b29f154
113,167
ipynb
Jupyter Notebook
IntroDataScience/ejercicios/18/imagen-kmeans.ipynb
aess14/Cursos-Uniandes
be016b25f2f49788235fbe91ec577fd16b9ad613
[ "MIT" ]
null
null
null
IntroDataScience/ejercicios/18/imagen-kmeans.ipynb
aess14/Cursos-Uniandes
be016b25f2f49788235fbe91ec577fd16b9ad613
[ "MIT" ]
null
null
null
IntroDataScience/ejercicios/18/imagen-kmeans.ipynb
aess14/Cursos-Uniandes
be016b25f2f49788235fbe91ec577fd16b9ad613
[ "MIT" ]
null
null
null
577.382653
62,484
0.949323
[ [ [ "import glob\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport sklearn.cluster\n%matplotlib inline", "_____no_output_____" ], [ "data = plt.imread(\"70.jpg\")\nplt.imshow(data)", "_____no_output_____" ], [ "print(np.shape(data))", "(100, 100, 3)\n" ], [ "X = data.reshape((-1,3))\nprint(np.shape(X))", "(10000, 3)\n" ], [ "n_clusters = 6\nk_means = sklearn.cluster.KMeans(n_clusters=n_clusters)\nk_means.fit(X)", "_____no_output_____" ], [ "# calculo a cual cluster pertenece cada pixel\ncluster = k_means.predict(X)\n\n# asigno a cada pixel el lugar del centro de su cluster\nX_centered = X.copy()\nfor i in range(n_clusters):\n ii = cluster==i\n X_centered[ii,:] = np.int_(k_means.cluster_centers_[i])\n\n# devuelvo los datos a las dimensiones originales de la imagen\ndata_centered = X_centered.reshape((100,100,3))", "_____no_output_____" ], [ "plt.figure(figsize=(10,5))\nplt.subplot(1,2,1)\nplt.imshow(data)\nplt.title(\"Original\")\n\nplt.subplot(1,2,2)\nplt.imshow(data_centered)\nplt.title(\"{} colores\".format(n_clusters))", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code" ] ]
4a00c3b2b6c00981e41efab354ad9501fc634fa2
243,675
ipynb
Jupyter Notebook
t81_558_class_12_02_qlearningreinforcement.ipynb
tenyi257/t81_558_deep_learning
b2fffd1b89d3d37adf4c9d82c4cbc991f871f3a3
[ "Apache-2.0" ]
5
2021-03-16T10:10:18.000Z
2021-03-16T10:10:26.000Z
t81_558_class_12_02_qlearningreinforcement.ipynb
tenyi257/t81_558_deep_learning
b2fffd1b89d3d37adf4c9d82c4cbc991f871f3a3
[ "Apache-2.0" ]
null
null
null
t81_558_class_12_02_qlearningreinforcement.ipynb
tenyi257/t81_558_deep_learning
b2fffd1b89d3d37adf4c9d82c4cbc991f871f3a3
[ "Apache-2.0" ]
2
2021-12-05T21:10:17.000Z
2022-02-12T08:29:21.000Z
164.423077
61,812
0.861783
[ [ [ "<a href=\"https://colab.research.google.com/github/jeffheaton/t81_558_deep_learning/blob/master/t81_558_class_12_02_qlearningreinforcement.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "# T81-558: Applications of Deep Neural Networks\n**Module 12: Reinforcement Learning**\n* Instructor: [Jeff Heaton](https://sites.wustl.edu/jeffheaton/), McKelvey School of Engineering, [Washington University in St. Louis](https://engineering.wustl.edu/Programs/Pages/default.aspx)\n* For more information visit the [class website](https://sites.wustl.edu/jeffheaton/t81-558/).", "_____no_output_____" ], [ "# Module 12 Video Material\n\n* Part 12.1: Introduction to the OpenAI Gym [[Video]](https://www.youtube.com/watch?v=_KbUxgyisjM&list=PLjy4p-07OYzulelvJ5KVaT2pDlxivl_BN) [[Notebook]](t81_558_class_12_01_ai_gym.ipynb)\n* **Part 12.2: Introduction to Q-Learning** [[Video]](https://www.youtube.com/watch?v=A3sYFcJY3lA&list=PLjy4p-07OYzulelvJ5KVaT2pDlxivl_BN) [[Notebook]](t81_558_class_12_02_qlearningreinforcement.ipynb)\n* Part 12.3: Keras Q-Learning in the OpenAI Gym [[Video]](https://www.youtube.com/watch?v=qy1SJmsRhvM&list=PLjy4p-07OYzulelvJ5KVaT2pDlxivl_BN) [[Notebook]](t81_558_class_12_03_keras_reinforce.ipynb)\n* Part 12.4: Atari Games with Keras Neural Networks [[Video]](https://www.youtube.com/watch?v=co0SwPWoZh0&list=PLjy4p-07OYzulelvJ5KVaT2pDlxivl_BN) [[Notebook]](t81_558_class_12_04_atari.ipynb)\n* Part 12.5: Application of Reinforcement Learning [[Video]](https://www.youtube.com/watch?v=1jQPP3RfwMI&list=PLjy4p-07OYzulelvJ5KVaT2pDlxivl_BN) [[Notebook]](t81_558_class_12_05_apply_rl.ipynb)\n", "_____no_output_____" ], [ "# Google CoLab Instructions\n\nThe following code ensures that Google CoLab is running the correct version of TensorFlow.", "_____no_output_____" ] ], [ [ "try:\n from google.colab import drive\n %tensorflow_version 2.x\n COLAB = True\n print(\"Note: using Google CoLab\")\nexcept:\n print(\"Note: not using Google CoLab\")\n COLAB = False", "Note: using Google CoLab\n" ], [ "if COLAB:\n !sudo apt-get install -y xvfb ffmpeg x11-utils\n !pip install -q 'gym==0.10.11'\n !pip install -q 'imageio==2.4.0'\n !pip install -q PILLOW\n !pip install -q 'pyglet==1.3.2'\n !pip install -q pyvirtualdisplay\n !pip install -q tf-agents", "Reading package lists... Done\nBuilding dependency tree \nReading state information... Done\nx11-utils is already the newest version (7.7+3build1).\nffmpeg is already the newest version (7:3.4.6-0ubuntu0.18.04.1).\nxvfb is already the newest version (2:1.19.6-1ubuntu4.4).\n0 upgraded, 0 newly installed, 0 to remove and 31 not upgraded.\n" ] ], [ [ "# Part 12.2: Introduction to Q-Learning\n\nQ-Learning is a foundational technique upon which deep reinforcement learning is based. Before we explore deep reinforcement learning, it is essential to understand Q-Learning. Several components make up any Q-Learning system.\n\n* **Agent** - The agent is an entity that exists in an environment that takes actions to affect the state of the environment, to receive rewards.\n* **Environment** - The environment is the universe that the agent exists in. 
The environment is always in a specific state that is changed by the actions of the agent.\n* **Actions** - Steps that can be performed by the agent to alter the environment \n* **Step** - A step occurs each time that the agent performs an action and potentially changes the environment state.\n* **Episode** - A chain of steps that ultimately culminates in the environment entering a terminal state.\n* **Epoch** - A training iteration of the agent that contains some number of episodes.\n* **Terminal State** - A state in which further actions do not make sense. In many environments, a terminal state occurs when the agent has one, lost, or the environment exceeding the maximum number of steps.\n\nQ-Learning works by building a table that suggests an action for every possible state. This approach runs into several problems. First, the environment is usually composed of several continuous numbers, resulting in an infinite number of states. Q-Learning handles continuous states by binning these numeric values into ranges. \n\nAdditionally, Q-Learning primarily deals with discrete actions, such as pressing a joystick up or down. Out of the box, Q-Learning does not deal with continuous inputs, such as a car's accelerator that can be in a range of positions from released to fully engaged. Researchers have come up with clever tricks to allow Q-Learning to accommodate continuous actions.\n\nIn the next chapter, we will learn more about deep reinforcement learning. Deep neural networks can help to solve the problems of continuous environments and action spaces. For now, we will apply regular Q-Learning to the Mountain Car problem from OpenAI Gym.\n\n### Introducing the Mountain Car\n\nThis section will demonstrate how Q-Learning can create a solution to the mountain car gym environment. The Mountain car is an environment where a car must climb a mountain. Because gravity is stronger than the car's engine, even with full throttle, it cannot merely accelerate up the steep slope. The vehicle is situated in a valley and must learn to utilize potential energy by driving up the opposite hill before the car can make it to the goal at the top of the rightmost hill.\n\nFirst, it might be helpful to visualize the mountain car environment. The following code shows this environment. This code makes use of TF-Agents to perform this render. Usually, we use TF-Agents for the type of deep reinforcement learning that we will see in the next module. However, for now, TF-Agents is just used to render the mountain care environment.", "_____no_output_____" ] ], [ [ "import tf_agents\nfrom tf_agents.environments import suite_gym\nimport PIL.Image\nimport pyvirtualdisplay\n\ndisplay = pyvirtualdisplay.Display(visible=0, size=(1400, 900)).start()\n\nenv_name = 'MountainCar-v0'\nenv = suite_gym.load(env_name)\nenv.reset()\nPIL.Image.fromarray(env.render())", "_____no_output_____" ] ], [ [ "The mountain car environment provides the following discrete actions:\n\n* 0 - Apply left force\n* 1 - Apply no force\n* 2 - Apply right force\n\nThe mountain car environment is made up of the following continuous values:\n\n* state[0] - Position \n* state[1] - Velocity\n\nThe following code shows an agent that applies full throttle to climb the hill. The cart is not strong enough. 
It will need to use potential energy from the mountain behind it.", "_____no_output_____" ] ], [ [ "import gym\nfrom gym.wrappers import Monitor\nimport glob\nimport io\nimport base64\nfrom IPython.display import HTML\nfrom pyvirtualdisplay import Display\nfrom IPython import display as ipythondisplay\n\ndisplay = Display(visible=0, size=(1400, 900))\ndisplay.start()\n\n\"\"\"\nUtility functions to enable video recording of gym environment \nand displaying it.\nTo enable video, just do \"env = wrap_env(env)\"\"\n\"\"\"\n\ndef show_video():\n mp4list = glob.glob('video/*.mp4')\n if len(mp4list) > 0:\n mp4 = mp4list[0]\n video = io.open(mp4, 'r+b').read()\n encoded = base64.b64encode(video)\n ipythondisplay.display(HTML(data='''<video alt=\"test\" autoplay \n loop controls style=\"height: 400px;\">\n <source src=\"data:video/mp4;base64,{0}\" type=\"video/mp4\" />\n </video>'''.format(encoded.decode('ascii'))))\n else: \n print(\"Could not find video\")\n \n\ndef wrap_env(env):\n env = Monitor(env, './video', force=True)\n return env", "_____no_output_____" ], [ "import gym\n\nif COLAB:\n env = wrap_env(gym.make(\"MountainCar-v0\"))\nelse:\n env = gym.make(\"MountainCar-v0\")\n\nenv.reset()\ndone = False\n\ni = 0\nwhile not done:\n i += 1\n state, reward, done, _ = env.step(2)\n env.render()\n print(f\"Step {i}: State={state}, Reward={reward}\")\n \nenv.close()", "Step 1: State=[-0.55754837 0.00126361], Reward=-1.0\nStep 2: State=[-0.55503058 0.00251779], Reward=-1.0\nStep 3: State=[-0.5512774 0.00375318], Reward=-1.0\nStep 4: State=[-0.54631687 0.00496053], Reward=-1.0\nStep 5: State=[-0.54018608 0.00613078], Reward=-1.0\nStep 6: State=[-0.53293095 0.00725514], Reward=-1.0\nStep 7: State=[-0.52460583 0.00832512], Reward=-1.0\nStep 8: State=[-0.51527315 0.00933267], Reward=-1.0\nStep 9: State=[-0.50500292 0.01027024], Reward=-1.0\nStep 10: State=[-0.49387208 0.01113084], Reward=-1.0\nStep 11: State=[-0.48196389 0.01190819], Reward=-1.0\nStep 12: State=[-0.46936715 0.01259674], Reward=-1.0\nStep 13: State=[-0.45617536 0.01319179], Reward=-1.0\nStep 14: State=[-0.44248581 0.01368956], Reward=-1.0\nStep 15: State=[-0.42839861 0.01408719], Reward=-1.0\nStep 16: State=[-0.41401575 0.01438286], Reward=-1.0\nStep 17: State=[-0.39944004 0.01457571], Reward=-1.0\nStep 18: State=[-0.38477414 0.0146659 ], Reward=-1.0\nStep 19: State=[-0.37011958 0.01465456], Reward=-1.0\nStep 20: State=[-0.35557587 0.01454371], Reward=-1.0\nStep 21: State=[-0.34123964 0.01433623], Reward=-1.0\nStep 22: State=[-0.3272039 0.01403574], Reward=-1.0\nStep 23: State=[-0.31355736 0.01364654], Reward=-1.0\nStep 24: State=[-0.30038394 0.01317342], Reward=-1.0\nStep 25: State=[-0.28776228 0.01262165], Reward=-1.0\nStep 26: State=[-0.27576549 0.0119968 ], Reward=-1.0\nStep 27: State=[-0.26446086 0.01130463], Reward=-1.0\nStep 28: State=[-0.25390983 0.01055103], Reward=-1.0\nStep 29: State=[-0.24416791 0.00974192], Reward=-1.0\nStep 30: State=[-0.23528474 0.00888317], Reward=-1.0\nStep 31: State=[-0.22730422 0.00798052], Reward=-1.0\nStep 32: State=[-0.22026462 0.0070396 ], Reward=-1.0\nStep 33: State=[-0.21419878 0.00606584], Reward=-1.0\nStep 34: State=[-0.2091343 0.00506448], Reward=-1.0\nStep 35: State=[-0.20509371 0.00404059], Reward=-1.0\nStep 36: State=[-0.20209464 0.00299907], Reward=-1.0\nStep 37: State=[-0.20015001 0.00194464], Reward=-1.0\nStep 38: State=[-0.19926807 0.00088193], Reward=-1.0\nStep 39: State=[-1.99452573e-01 -1.84499834e-04], Reward=-1.0\nStep 40: State=[-0.20070273 -0.00125015], Reward=-1.0\nStep 41: 
State=[-0.20301324 -0.00231051], Reward=-1.0\nStep 42: State=[-0.20637425 -0.00336101], Reward=-1.0\nStep 43: State=[-0.21077122 -0.00439698], Reward=-1.0\nStep 44: State=[-0.21618486 -0.00541363], Reward=-1.0\nStep 45: State=[-0.22259088 -0.00640603], Reward=-1.0\nStep 46: State=[-0.22995992 -0.00736903], Reward=-1.0\nStep 47: State=[-0.23825726 -0.00829734], Reward=-1.0\nStep 48: State=[-0.24744271 -0.00918545], Reward=-1.0\nStep 49: State=[-0.25747039 -0.01002769], Reward=-1.0\nStep 50: State=[-0.26828866 -0.01081826], Reward=-1.0\nStep 51: State=[-0.27983994 -0.01155128], Reward=-1.0\nStep 52: State=[-0.29206078 -0.01222083], Reward=-1.0\nStep 53: State=[-0.30488184 -0.01282106], Reward=-1.0\nStep 54: State=[-0.31822807 -0.01334624], Reward=-1.0\nStep 55: State=[-0.33201897 -0.0137909 ], Reward=-1.0\nStep 56: State=[-0.34616892 -0.01414994], Reward=-1.0\nStep 57: State=[-0.36058763 -0.01441871], Reward=-1.0\nStep 58: State=[-0.37518077 -0.01459314], Reward=-1.0\nStep 59: State=[-0.38985063 -0.01466986], Reward=-1.0\nStep 60: State=[-0.4044969 -0.01464627], Reward=-1.0\nStep 61: State=[-0.41901755 -0.01452065], Reward=-1.0\nStep 62: State=[-0.43330976 -0.0142922 ], Reward=-1.0\nStep 63: State=[-0.44727088 -0.01396112], Reward=-1.0\nStep 64: State=[-0.46079947 -0.01352859], Reward=-1.0\nStep 65: State=[-0.47379628 -0.0129968 ], Reward=-1.0\nStep 66: State=[-0.48616521 -0.01236894], Reward=-1.0\nStep 67: State=[-0.49781431 -0.0116491 ], Reward=-1.0\nStep 68: State=[-0.5086566 -0.01084229], Reward=-1.0\nStep 69: State=[-0.51861091 -0.00995432], Reward=-1.0\nStep 70: State=[-0.52760264 -0.00899172], Reward=-1.0\nStep 71: State=[-0.53556433 -0.0079617 ], Reward=-1.0\nStep 72: State=[-0.54243631 -0.00687197], Reward=-1.0\nStep 73: State=[-0.54816708 -0.00573077], Reward=-1.0\nStep 74: State=[-0.55271375 -0.00454667], Reward=-1.0\nStep 75: State=[-0.55604234 -0.00332859], Reward=-1.0\nStep 76: State=[-0.55812798 -0.00208564], Reward=-1.0\nStep 77: State=[-0.55895512 -0.00082714], Reward=-1.0\nStep 78: State=[-5.58517581e-01 4.37537106e-04], Reward=-1.0\nStep 79: State=[-0.55681863 0.00169895], Reward=-1.0\nStep 80: State=[-0.55387095 0.00294769], Reward=-1.0\nStep 81: State=[-0.54969653 0.00417442], Reward=-1.0\nStep 82: State=[-0.54432658 0.00536995], Reward=-1.0\nStep 83: State=[-0.53780127 0.00652531], Reward=-1.0\nStep 84: State=[-0.53016948 0.00763179], Reward=-1.0\nStep 85: State=[-0.52148841 0.00868107], Reward=-1.0\nStep 86: State=[-0.51182317 0.00966524], Reward=-1.0\nStep 87: State=[-0.50124622 0.01057694], Reward=-1.0\nStep 88: State=[-0.4898368 0.01140943], Reward=-1.0\nStep 89: State=[-0.47768015 0.01215664], Reward=-1.0\nStep 90: State=[-0.46486681 0.01281334], Reward=-1.0\nStep 91: State=[-0.45149168 0.01337513], Reward=-1.0\nStep 92: State=[-0.43765315 0.01383853], Reward=-1.0\nStep 93: State=[-0.42345209 0.01420106], Reward=-1.0\nStep 94: State=[-0.40899088 0.01446121], Reward=-1.0\nStep 95: State=[-0.3943724 0.01461848], Reward=-1.0\nStep 96: State=[-0.37969902 0.01467338], Reward=-1.0\nStep 97: State=[-0.36507167 0.01462735], Reward=-1.0\nStep 98: State=[-0.35058897 0.01448271], Reward=-1.0\nStep 99: State=[-0.33634635 0.01424261], Reward=-1.0\nStep 100: State=[-0.32243543 0.01391093], Reward=-1.0\nStep 101: State=[-0.3089433 0.01349213], Reward=-1.0\nStep 102: State=[-0.2959521 0.0129912], Reward=-1.0\nStep 103: State=[-0.28353859 0.01241351], Reward=-1.0\nStep 104: State=[-0.27177389 0.01176471], Reward=-1.0\nStep 105: State=[-0.26072327 0.01105062], Reward=-1.0\nStep 106: 
State=[-0.25044611 0.01027716], Reward=-1.0\nStep 107: State=[-0.2409959 0.00945022], Reward=-1.0\nStep 108: State=[-0.23242026 0.00857563], Reward=-1.0\nStep 109: State=[-0.22476114 0.00765912], Reward=-1.0\nStep 110: State=[-0.2180549 0.00670624], Reward=-1.0\nStep 111: State=[-0.21233255 0.00572235], Reward=-1.0\nStep 112: State=[-0.20761992 0.00471263], Reward=-1.0\nStep 113: State=[-0.20393781 0.0036821 ], Reward=-1.0\nStep 114: State=[-0.20130223 0.00263558], Reward=-1.0\nStep 115: State=[-0.19972446 0.00157777], Reward=-1.0\nStep 116: State=[-0.19921119 0.00051327], Reward=-1.0\nStep 117: State=[-0.1997646 -0.00055341], Reward=-1.0\nStep 118: State=[-0.20138234 -0.00161774], Reward=-1.0\nStep 119: State=[-0.20405755 -0.00267521], Reward=-1.0\nStep 120: State=[-0.20777876 -0.00372121], Reward=-1.0\nStep 121: State=[-0.21252982 -0.00475105], Reward=-1.0\nStep 122: State=[-0.2182897 -0.00575988], Reward=-1.0\nStep 123: State=[-0.2250324 -0.0067427], Reward=-1.0\nStep 124: State=[-0.23272672 -0.00769432], Reward=-1.0\nStep 125: State=[-0.24133607 -0.00860935], Reward=-1.0\nStep 126: State=[-0.25081832 -0.00948224], Reward=-1.0\nStep 127: State=[-0.26112559 -0.01030728], Reward=-1.0\nStep 128: State=[-0.27220421 -0.01107861], Reward=-1.0\nStep 129: State=[-0.28399455 -0.01179035], Reward=-1.0\nStep 130: State=[-0.29643113 -0.01243658], Reward=-1.0\nStep 131: State=[-0.30944261 -0.01301148], Reward=-1.0\nStep 132: State=[-0.32295202 -0.01350941], Reward=-1.0\nStep 133: State=[-0.33687704 -0.01392502], Reward=-1.0\nStep 134: State=[-0.35113038 -0.01425334], Reward=-1.0\nStep 135: State=[-0.36562028 -0.0144899 ], Reward=-1.0\nStep 136: State=[-0.38025116 -0.01463088], Reward=-1.0\nStep 137: State=[-0.39492432 -0.01467316], Reward=-1.0\nStep 138: State=[-0.40953874 -0.01461442], Reward=-1.0\nStep 139: State=[-0.42399202 -0.01445328], Reward=-1.0\nStep 140: State=[-0.43818128 -0.01418926], Reward=-1.0\nStep 141: State=[-0.45200419 -0.0138229 ], Reward=-1.0\nStep 142: State=[-0.46535993 -0.01335574], Reward=-1.0\nStep 143: State=[-0.47815025 -0.01279032], Reward=-1.0\nStep 144: State=[-0.49028037 -0.01213013], Reward=-1.0\nStep 145: State=[-0.50165997 -0.0113796 ], Reward=-1.0\nStep 146: State=[-0.51220399 -0.01054402], Reward=-1.0\nStep 147: State=[-0.52183345 -0.00962946], Reward=-1.0\nStep 148: State=[-0.53047616 -0.0086427 ], Reward=-1.0\nStep 149: State=[-0.53806728 -0.00759113], Reward=-1.0\nStep 150: State=[-0.54454993 -0.00648265], Reward=-1.0\nStep 151: State=[-0.54987555 -0.00532562], Reward=-1.0\nStep 152: State=[-0.55400429 -0.00412875], Reward=-1.0\nStep 153: State=[-0.55690531 -0.00290102], Reward=-1.0\nStep 154: State=[-0.55855695 -0.00165164], Reward=-1.0\nStep 155: State=[-5.5894688e-01 -3.8993055e-04], Reward=-1.0\nStep 156: State=[-0.5580722 0.00087468], Reward=-1.0\nStep 157: State=[-0.55593942 0.00213277], Reward=-1.0\nStep 158: State=[-0.55256447 0.00337495], Reward=-1.0\nStep 159: State=[-0.54797256 0.00459192], Reward=-1.0\nStep 160: State=[-0.542198 0.00577456], Reward=-1.0\nStep 161: State=[-0.53528402 0.00691398], Reward=-1.0\nStep 162: State=[-0.52728242 0.0080016 ], Reward=-1.0\nStep 163: State=[-0.51825319 0.00902923], Reward=-1.0\nStep 164: State=[-0.50826405 0.00998914], Reward=-1.0\nStep 165: State=[-0.49738988 0.01087417], Reward=-1.0\nStep 166: State=[-0.48571208 0.0116778 ], Reward=-1.0\nStep 167: State=[-0.47331782 0.01239426], Reward=-1.0\nStep 168: State=[-0.46029923 0.01301859], Reward=-1.0\nStep 169: State=[-0.44675254 0.01354669], Reward=-1.0\nStep 
170: State=[-0.43277711 0.01397543], Reward=-1.0\nStep 171: State=[-0.41847444 0.01430267], Reward=-1.0\nStep 172: State=[-0.4039472 0.01452724], Reward=-1.0\nStep 173: State=[-0.3892982 0.014649 ], Reward=-1.0\nStep 174: State=[-0.37462943 0.01466878], Reward=-1.0\nStep 175: State=[-0.3600411 0.01458833], Reward=-1.0\nStep 176: State=[-0.34563082 0.01441028], Reward=-1.0\nStep 177: State=[-0.33149278 0.01413803], Reward=-1.0\nStep 178: State=[-0.3177171 0.01377568], Reward=-1.0\nStep 179: State=[-0.30438921 0.01332789], Reward=-1.0\nStep 180: State=[-0.29158942 0.01279979], Reward=-1.0\nStep 181: State=[-0.27939257 0.01219685], Reward=-1.0\nStep 182: State=[-0.26786777 0.0115248 ], Reward=-1.0\nStep 183: State=[-0.25707826 0.01078951], Reward=-1.0\nStep 184: State=[-0.24708137 0.00999688], Reward=-1.0\nStep 185: State=[-0.23792856 0.00915281], Reward=-1.0\nStep 186: State=[-0.22966547 0.00826309], Reward=-1.0\nStep 187: State=[-0.2223321 0.00733338], Reward=-1.0\nStep 188: State=[-0.21596293 0.00636917], Reward=-1.0\nStep 189: State=[-0.21058716 0.00537577], Reward=-1.0\nStep 190: State=[-0.20622886 0.0043583 ], Reward=-1.0\nStep 191: State=[-0.20290716 0.0033217 ], Reward=-1.0\nStep 192: State=[-0.20063641 0.00227075], Reward=-1.0\nStep 193: State=[-0.19942631 0.00121011], Reward=-1.0\nStep 194: State=[-1.99281965e-01 1.44341652e-04], Reward=-1.0\nStep 195: State=[-0.200204 -0.00092203], Reward=-1.0\nStep 196: State=[-0.20218851 -0.00198451], Reward=-1.0\nStep 197: State=[-0.20522704 -0.00303853], Reward=-1.0\nStep 198: State=[-0.20930653 -0.00407949], Reward=-1.0\nStep 199: State=[-0.21440914 -0.00510261], Reward=-1.0\nStep 200: State=[-0.22051217 -0.00610302], Reward=-1.0\n" ], [ "show_video()", "_____no_output_____" ] ], [ [ "### Programmed Car\n\nNow we will look at a car that I hand-programmed. This car is straightforward; however, it solves the problem. The programmed car always applies force to one direction or another. It does not break. Whatever direction the vehicle is currently rolling, the agent uses power in that direction. Therefore, the car begins to climb a hill, is overpowered, and turns backward. 
However, once it starts to roll backward force is immediately applied in this new direction.\n\nThe following code implements this preprogrammed car.", "_____no_output_____" ] ], [ [ "import gym\n\nif COLAB:\n env = wrap_env(gym.make(\"MountainCar-v0\"))\nelse:\n env = gym.make(\"MountainCar-v0\")\n \nstate = env.reset()\ndone = False\n\ni = 0\nwhile not done:\n i += 1\n \n if state[1]>0:\n action = 2\n else:\n action = 0\n \n state, reward, done, _ = env.step(action)\n env.render()\n print(f\"Step {i}: State={state}, Reward={reward}\")\n \nenv.close()", "Step 1: State=[-0.57730941 -0.00060338], Reward=-1.0\nStep 2: State=[-0.5785117 -0.00120229], Reward=-1.0\nStep 3: State=[-0.580304 -0.0017923], Reward=-1.0\nStep 4: State=[-0.58267307 -0.00236906], Reward=-1.0\nStep 5: State=[-0.58560139 -0.00292832], Reward=-1.0\nStep 6: State=[-0.58906736 -0.00346598], Reward=-1.0\nStep 7: State=[-0.59304548 -0.00397811], Reward=-1.0\nStep 8: State=[-0.5975065 -0.00446102], Reward=-1.0\nStep 9: State=[-0.60241775 -0.00491125], Reward=-1.0\nStep 10: State=[-0.60774335 -0.0053256 ], Reward=-1.0\nStep 11: State=[-0.61344454 -0.00570119], Reward=-1.0\nStep 12: State=[-0.61948002 -0.00603548], Reward=-1.0\nStep 13: State=[-0.62580627 -0.00632625], Reward=-1.0\nStep 14: State=[-0.63237791 -0.00657165], Reward=-1.0\nStep 15: State=[-0.63914812 -0.00677021], Reward=-1.0\nStep 16: State=[-0.64606896 -0.00692084], Reward=-1.0\nStep 17: State=[-0.65309179 -0.00702284], Reward=-1.0\nStep 18: State=[-0.66016768 -0.00707588], Reward=-1.0\nStep 19: State=[-0.66724771 -0.00708003], Reward=-1.0\nStep 20: State=[-0.67428342 -0.00703571], Reward=-1.0\nStep 21: State=[-0.68122709 -0.00694367], Reward=-1.0\nStep 22: State=[-0.68803212 -0.00680503], Reward=-1.0\nStep 23: State=[-0.69465331 -0.00662119], Reward=-1.0\nStep 24: State=[-0.70104716 -0.00639385], Reward=-1.0\nStep 25: State=[-0.70717213 -0.00612496], Reward=-1.0\nStep 26: State=[-0.71298884 -0.00581671], Reward=-1.0\nStep 27: State=[-0.71846032 -0.00547148], Reward=-1.0\nStep 28: State=[-0.72355218 -0.00509185], Reward=-1.0\nStep 29: State=[-0.72823271 -0.00468053], Reward=-1.0\nStep 30: State=[-0.73247309 -0.00424038], Reward=-1.0\nStep 31: State=[-0.73624744 -0.00377435], Reward=-1.0\nStep 32: State=[-0.73953293 -0.00328548], Reward=-1.0\nStep 33: State=[-0.74230982 -0.00277689], Reward=-1.0\nStep 34: State=[-0.74456157 -0.00225175], Reward=-1.0\nStep 35: State=[-0.74627483 -0.00171326], Reward=-1.0\nStep 36: State=[-0.7474395 -0.00116466], Reward=-1.0\nStep 37: State=[-7.48048712e-01 -6.09216585e-04], Reward=-1.0\nStep 38: State=[-7.48098908e-01 -5.01962094e-05], Reward=-1.0\nStep 39: State=[-7.47589789e-01 5.09118450e-04], Reward=-1.0\nStep 40: State=[-0.74452434 0.00306545], Reward=-1.0\nStep 41: State=[-0.73892063 0.00560372], Reward=-1.0\nStep 42: State=[-0.73081198 0.00810864], Reward=-1.0\nStep 43: State=[-0.72024742 0.01056456], Reward=-1.0\nStep 44: State=[-0.70729207 0.01295535], Reward=-1.0\nStep 45: State=[-0.6920277 0.01526437], Reward=-1.0\nStep 46: State=[-0.67455318 0.01747452], Reward=-1.0\nStep 47: State=[-0.6549848 0.01956837], Reward=-1.0\nStep 48: State=[-0.63345635 0.02152845], Reward=-1.0\nStep 49: State=[-0.61011881 0.02333755], Reward=-1.0\nStep 50: State=[-0.58513962 0.02497919], Reward=-1.0\nStep 51: State=[-0.5587015 0.02643812], Reward=-1.0\nStep 52: State=[-0.53100059 0.02770091], Reward=-1.0\nStep 53: State=[-0.50224417 0.02875642], Reward=-1.0\nStep 54: State=[-0.4726478 0.02959637], Reward=-1.0\nStep 55: State=[-0.44243208 
0.03021572], Reward=-1.0\nStep 56: State=[-0.41181911 0.03061297], Reward=-1.0\nStep 57: State=[-0.38102886 0.03079025], Reward=-1.0\nStep 58: State=[-0.35027559 0.03075328], Reward=-1.0\nStep 59: State=[-0.31976445 0.03051114], Reward=-1.0\nStep 60: State=[-0.28968855 0.0300759 ], Reward=-1.0\nStep 61: State=[-0.26022651 0.02946204], Reward=-1.0\nStep 62: State=[-0.23154055 0.02868596], Reward=-1.0\nStep 63: State=[-0.20377533 0.02776522], Reward=-1.0\nStep 64: State=[-0.17705734 0.026718 ], Reward=-1.0\nStep 65: State=[-0.15149488 0.02556246], Reward=-1.0\nStep 66: State=[-0.12717863 0.02431624], Reward=-1.0\nStep 67: State=[-0.10418263 0.02299601], Reward=-1.0\nStep 68: State=[-0.0825655 0.02161713], Reward=-1.0\nStep 69: State=[-0.06237207 0.02019343], Reward=-1.0\nStep 70: State=[-0.04363501 0.01873706], Reward=-1.0\nStep 71: State=[-0.02637656 0.01725845], Reward=-1.0\nStep 72: State=[-0.01061028 0.01576628], Reward=-1.0\nStep 73: State=[0.00365726 0.01426754], Reward=-1.0\nStep 74: State=[0.01642496 0.01276769], Reward=-1.0\nStep 75: State=[0.02769568 0.01127073], Reward=-1.0\nStep 76: State=[0.03747504 0.00977935], Reward=-1.0\nStep 77: State=[0.04577017 0.00829513], Reward=-1.0\nStep 78: State=[0.05258884 0.00681867], Reward=-1.0\nStep 79: State=[0.05793855 0.00534971], Reward=-1.0\nStep 80: State=[0.06182593 0.00388738], Reward=-1.0\nStep 81: State=[0.0642562 0.00243026], Reward=-1.0\nStep 82: State=[0.06523276 0.00097657], Reward=-1.0\nStep 83: State=[ 0.06475705 -0.00047571], Reward=-1.0\nStep 84: State=[ 0.06082837 -0.00392868], Reward=-1.0\nStep 85: State=[ 0.0534412 -0.00738717], Reward=-1.0\nStep 86: State=[ 0.04258609 -0.01085511], Reward=-1.0\nStep 87: State=[ 0.02825135 -0.01433474], Reward=-1.0\nStep 88: State=[ 0.01042559 -0.01782576], Reward=-1.0\nStep 89: State=[-0.01089895 -0.02132454], Reward=-1.0\nStep 90: State=[-0.03572216 -0.0248232 ], Reward=-1.0\nStep 91: State=[-0.06403102 -0.02830886], Reward=-1.0\nStep 92: State=[-0.0957939 -0.03176288], Reward=-1.0\nStep 93: State=[-0.13095425 -0.03516035], Reward=-1.0\nStep 94: State=[-0.16942414 -0.03846989], Reward=-1.0\nStep 95: State=[-0.21107801 -0.04165386], Reward=-1.0\nStep 96: State=[-0.25574716 -0.04466916], Reward=-1.0\nStep 97: State=[-0.30321589 -0.04746873], Reward=-1.0\nStep 98: State=[-0.35321967 -0.05000379], Reward=-1.0\nStep 99: State=[-0.40544638 -0.05222671], Reward=-1.0\nStep 100: State=[-0.4595408 -0.05409441], Reward=-1.0\nStep 101: State=[-0.51511269 -0.0555719 ], Reward=-1.0\nStep 102: State=[-0.57174823 -0.05663553], Reward=-1.0\nStep 103: State=[-0.6290239 -0.05727567], Reward=-1.0\nStep 104: State=[-0.68652199 -0.0574981 ], Reward=-1.0\nStep 105: State=[-0.74384624 -0.05732425], Reward=-1.0\nStep 106: State=[-0.80063623 -0.05678999], Reward=-1.0\nStep 107: State=[-0.85657951 -0.05594328], Reward=-1.0\nStep 108: State=[-0.91142055 -0.05484104], Reward=-1.0\nStep 109: State=[-0.96496613 -0.05354558], Reward=-1.0\nStep 110: State=[-1.0170874 -0.05212127], Reward=-1.0\nStep 111: State=[-1.06771887 -0.05063146], Reward=-1.0\nStep 112: State=[-1.11685507 -0.0491362 ], Reward=-1.0\nStep 113: State=[-1.16454566 -0.04769059], Reward=-1.0\nStep 114: State=[-1.2 0. 
], Reward=-1.0\nStep 115: State=[-1.1987581 0.0012419], Reward=-1.0\nStep 116: State=[-1.19427021 0.0044879 ], Reward=-1.0\nStep 117: State=[-1.18652173 0.00774848], Reward=-1.0\nStep 118: State=[-1.17548846 0.01103326], Reward=-1.0\nStep 119: State=[-1.16113808 0.01435038], Reward=-1.0\nStep 120: State=[-1.14343234 0.01770574], Reward=-1.0\nStep 121: State=[-1.12233007 0.02110228], Reward=-1.0\nStep 122: State=[-1.09779103 0.02453904], Reward=-1.0\nStep 123: State=[-1.06978073 0.0280103 ], Reward=-1.0\nStep 124: State=[-1.03827616 0.03150456], Reward=-1.0\nStep 125: State=[-1.0032725 0.03500367], Reward=-1.0\nStep 126: State=[-0.9647905 0.03848199], Reward=-1.0\nStep 127: State=[-0.92288452 0.04190598], Reward=-1.0\nStep 128: State=[-0.87765038 0.04523414], Reward=-1.0\nStep 129: State=[-0.82923273 0.04841765], Reward=-1.0\nStep 130: State=[-0.77783078 0.05140195], Reward=-1.0\nStep 131: State=[-0.72370164 0.05412914], Reward=-1.0\nStep 132: State=[-0.66716026 0.05654138], Reward=-1.0\nStep 133: State=[-0.60857514 0.05858511], Reward=-1.0\nStep 134: State=[-0.54835959 0.06021555], Reward=-1.0\nStep 135: State=[-0.4869585 0.06140109], Reward=-1.0\nStep 136: State=[-0.42483166 0.06212684], Reward=-1.0\nStep 137: State=[-0.36243478 0.06239688], Reward=-1.0\nStep 138: State=[-0.30020009 0.06223469], Reward=-1.0\nStep 139: State=[-0.23851824 0.06168185], Reward=-1.0\nStep 140: State=[-0.17772322 0.06079502], Reward=-1.0\nStep 141: State=[-0.1180812 0.05964202], Reward=-1.0\nStep 142: State=[-0.05978395 0.05829725], Reward=-1.0\nStep 143: State=[-0.0029466 0.05683735], Reward=-1.0\nStep 144: State=[0.05239085 0.05533745], Reward=-1.0\nStep 145: State=[0.10625911 0.05386826], Reward=-1.0\nStep 146: State=[0.15875332 0.05249421], Reward=-1.0\nStep 147: State=[0.21002575 0.05127242], Reward=-1.0\nStep 148: State=[0.26027822 0.05025247], Reward=-1.0\nStep 149: State=[0.30975487 0.04947665], Reward=-1.0\nStep 150: State=[0.35873547 0.0489806 ], Reward=-1.0\nStep 151: State=[0.40752939 0.04879392], Reward=-1.0\nStep 152: State=[0.45647027 0.04894088], Reward=-1.0\nStep 153: State=[0.50591109 0.04944082], Reward=-1.0\n" ] ], [ [ "We now visualize the preprogrammed car solving the problem.", "_____no_output_____" ] ], [ [ "show_video()", "_____no_output_____" ] ], [ [ "### Reinforcement Learning\n\nQ-Learning is a system of rewards that the algorithm gives an agent for successfully moving the environment into a state considered successful. These rewards are the Q-values from which this algorithm takes its name. The final output from the Q-Learning algorithm is a table of Q-values that indicate the reward value of every action that the agent can take, given every possible environment state. The agent must bin continuous state values into a fixed finite number of columns.\n\nLearning occurs when the algorithm runs the agent and environment through a series of episodes and updates the Q-values based on the rewards received from actions taken; Figure 12.REINF provides a high-level overview of this reinforcement or Q-Learning loop.\n\n**Figure 12.REINF:Reinforcement/Q Learning**\n![Reinforcement Learning](https://raw.githubusercontent.com/jeffheaton/t81_558_deep_learning/master/images/reinforcement.png \"Reinforcement Learning\")\n\nThe Q-values can dictate action by selecting the action column with the highest Q-value for the current environment state. 
The choice between choosing a random action and a Q-value driven action is governed by the epsilon ($\\epsilon$) parameter, which is the probability of random action.\n\nEach time through the training loop, the training algorithm updates the Q-values according to the following equation.\n", "_____no_output_____" ], [ "$Q^{new}(s_{t},a_{t}) \\leftarrow \\underbrace{Q(s_{t},a_{t})}_{\\text{old value}} + \\underbrace{\\alpha}_{\\text{learning rate}} \\cdot \\overbrace{\\bigg( \\underbrace{\\underbrace{r_{t}}_{\\text{reward}} + \\underbrace{\\gamma}_{\\text{discount factor}} \\cdot \\underbrace{\\max_{a}Q(s_{t+1}, a)}_{\\text{estimate of optimal future value}}}_{\\text{new value (temporal difference target)}} - \\underbrace{Q(s_{t},a_{t})}_{\\text{old value}} \\bigg) }^{\\text{temporal difference}}$\n\nThere are several parameters in this equation:\n* alpha ($\\alpha$) - The learning rate, how much should the current step cause the Q-values to be updated.\n* lambda ($\\lambda$) - The discount factor is the percentage of future reward that the algorithm should consider in this update.\n\nThis equation modifies several values:\n\n* $Q(s_t,a_t)$ - The Q-table. For each combination of states, what reward would the agent likely receive for performing each action?\n* $s_t$ - The current state.\n* $r_t$ - The last reward received.\n* $a_t$ - The action that the agent will perform.\n\nThe equation works by calculating a delta (temporal difference) that the equation should apply to the old state. This learning rate ($\\alpha$) scales this delta. A learning rate of 1.0 would fully implement the temporal difference to the Q-values each iteration and would likely be very chaotic.\n\nThere are two parts to the temporal difference: the new and old values. The new value is subtracted from the old value to provide a delta; the full amount that we would change the Q-value by if the learning rate did not scale this value. The new value is a summation of the reward received from the last action and the maximum of the Q-values from the resulting state when the client takes this action. It is essential to add the maximum of action Q-values for the new state because it estimates the optimal future values from proceeding with this action. \n\n### Q-Learning Car\n\nWe will now use Q-Learning to produce a car that learns to drive itself. Look out, Tesla! We begin by defining two essential functions.\n", "_____no_output_____" ] ], [ [ "import gym\nimport numpy as np\n\n# This function converts the floating point state values into \n# discrete values. This is often called binning. We divide \n# the range that the state values might occupy and assign \n# each region to a bucket.\ndef calc_discrete_state(state):\n discrete_state = (state - env.observation_space.low)/buckets\n return tuple(discrete_state.astype(np.int)) \n\n# Run one game. The q_table to use is provided. We also \n# provide a flag to indicate if the game should be \n# rendered/animated. 
Finally, we also provide\n# a flag to indicate if the q_table should be updated.\ndef run_game(q_table, render, should_update):\n done = False\n discrete_state = calc_discrete_state(env.reset())\n success = False\n \n while not done:\n # Exploit or explore\n if np.random.random() > epsilon:\n # Exploit - use q-table to take current best action \n # (and probably refine)\n action = np.argmax(q_table[discrete_state])\n else:\n # Explore - t\n action = np.random.randint(0, env.action_space.n)\n \n # Run simulation step\n new_state, reward, done, _ = env.step(action)\n \n # Convert continuous state to discrete\n new_state_disc = calc_discrete_state(new_state)\n\n # Have we reached the goal position (have we won?)?\n if new_state[0] >= env.unwrapped.goal_position:\n success = True\n \n # Update q-table\n if should_update:\n max_future_q = np.max(q_table[new_state_disc])\n current_q = q_table[discrete_state + (action,)]\n new_q = (1 - LEARNING_RATE) * current_q + LEARNING_RATE * \\\n (reward + DISCOUNT * max_future_q)\n q_table[discrete_state + (action,)] = new_q\n\n discrete_state = new_state_disc\n \n if render:\n env.render()\n \n return success\n", "_____no_output_____" ] ], [ [ "Several hyperparameters are very important for Q-Learning. These parameters will likely need adjustment as you apply Q-Learning to other problems. Because of this, it is crucial to understand the role of each parameter.\n\n* **LEARNING_RATE** The rate at which previous Q-values are updated based on new episodes run during training. \n* **DISCOUNT** The amount of significance to give estimates of future rewards when added to the reward for the current action taken. A value of 0.95 would indicate a discount of 5% to the future reward estimates. \n* **EPISODES** The number of episodes to train over. Increase this for more complex problems; however, training time also increases.\n* **SHOW_EVERY** How many episodes to allow to elapse before showing an update.\n* **DISCRETE_GRID_SIZE** How many buckets to use when converting each of the continuous state variables. For example, [10, 10] indicates that the algorithm should use ten buckets for the first and second state variables.\n* **START_EPSILON_DECAYING** Epsilon is the probability that the agent will select a random action over what the Q-Table suggests. This value determines the starting probability of randomness.\n* **END_EPSILON_DECAYING** How many episodes should elapse before epsilon goes to zero and no random actions are permitted. For example, EPISODES//10 means only the first 1/10th of the episodes might have random actions.", "_____no_output_____" ] ], [ [ "LEARNING_RATE = 0.1\nDISCOUNT = 0.95\nEPISODES = 50000\nSHOW_EVERY = 1000\n\nDISCRETE_GRID_SIZE = [10, 10]\nSTART_EPSILON_DECAYING = 0.5\nEND_EPSILON_DECAYING = EPISODES//10", "_____no_output_____" ] ], [ [ "We can now make the environment. If we are running in Google COLAB then we wrap the environment to be displayed inside the web browser. Next create the discrete buckets for state and build Q-table.", "_____no_output_____" ] ], [ [ "if COLAB:\n env = wrap_env(gym.make(\"MountainCar-v0\"))\nelse:\n env = gym.make(\"MountainCar-v0\")\n\nepsilon = 1 \nepsilon_change = epsilon/(END_EPSILON_DECAYING - START_EPSILON_DECAYING)\nbuckets = (env.observation_space.high - env.observation_space.low) \\\n /DISCRETE_GRID_SIZE\nq_table = np.random.uniform(low=-3, high=0, size=(DISCRETE_GRID_SIZE \\\n + [env.action_space.n]))\nsuccess = False", "_____no_output_____" ] ], [ [ "We can now make the environment. 
If we are running in Google COLAB then we wrap the environment to be displayed inside the web browser. Next, create the discrete buckets for state and build Q-table.", "_____no_output_____" ] ], [ [ "episode = 0\nsuccess_count = 0\n\n# Loop through the required number of episodes\nwhile episode<EPISODES:\n episode+=1\n done = False\n\n # Run the game. If we are local, display render animation at SHOW_EVERY\n # intervals. \n if episode % SHOW_EVERY == 0:\n print(f\"Current episode: {episode}, success: {success_count}\" +\\\n \" ({float(success_count)/SHOW_EVERY})\")\n success = run_game(q_table, True, False)\n success_count = 0\n else:\n success = run_game(q_table, False, True)\n \n # Count successes\n if success:\n success_count += 1\n\n # Move epsilon towards its ending value, if it still needs to move\n if END_EPSILON_DECAYING >= episode >= START_EPSILON_DECAYING:\n epsilon = max(0, epsilon - epsilon_change)\n\nprint(success)", "Current episode: 1000, success: 0 (0.0)\nCurrent episode: 2000, success: 0 (0.0)\nCurrent episode: 3000, success: 0 (0.0)\nCurrent episode: 4000, success: 29 (0.029)\nCurrent episode: 5000, success: 345 (0.345)\nCurrent episode: 6000, success: 834 (0.834)\nCurrent episode: 7000, success: 797 (0.797)\nCurrent episode: 8000, success: 679 (0.679)\nCurrent episode: 9000, success: 600 (0.6)\nCurrent episode: 10000, success: 728 (0.728)\nCurrent episode: 11000, success: 205 (0.205)\nCurrent episode: 12000, success: 612 (0.612)\nCurrent episode: 13000, success: 733 (0.733)\nCurrent episode: 14000, success: 1000 (1.0)\nCurrent episode: 15000, success: 998 (0.998)\nCurrent episode: 16000, success: 879 (0.879)\nCurrent episode: 17000, success: 510 (0.51)\nCurrent episode: 18000, success: 615 (0.615)\nCurrent episode: 19000, success: 220 (0.22)\nCurrent episode: 20000, success: 445 (0.445)\nCurrent episode: 21000, success: 627 (0.627)\nCurrent episode: 22000, success: 597 (0.597)\nCurrent episode: 23000, success: 827 (0.827)\nCurrent episode: 24000, success: 862 (0.862)\nCurrent episode: 25000, success: 322 (0.322)\nCurrent episode: 26000, success: 632 (0.632)\nCurrent episode: 27000, success: 613 (0.613)\nCurrent episode: 28000, success: 409 (0.409)\nCurrent episode: 29000, success: 379 (0.379)\nCurrent episode: 30000, success: 320 (0.32)\nCurrent episode: 31000, success: 327 (0.327)\nCurrent episode: 32000, success: 302 (0.302)\nCurrent episode: 33000, success: 308 (0.308)\nCurrent episode: 34000, success: 336 (0.336)\nCurrent episode: 35000, success: 274 (0.274)\nCurrent episode: 36000, success: 281 (0.281)\nCurrent episode: 37000, success: 301 (0.301)\nCurrent episode: 38000, success: 322 (0.322)\nCurrent episode: 39000, success: 292 (0.292)\nCurrent episode: 40000, success: 299 (0.299)\nCurrent episode: 41000, success: 281 (0.281)\nCurrent episode: 42000, success: 233 (0.233)\nCurrent episode: 43000, success: 380 (0.38)\nCurrent episode: 44000, success: 598 (0.598)\nCurrent episode: 45000, success: 933 (0.933)\nCurrent episode: 46000, success: 986 (0.986)\nCurrent episode: 47000, success: 1000 (1.0)\nCurrent episode: 48000, success: 1000 (1.0)\nCurrent episode: 49000, success: 1000 (1.0)\nCurrent episode: 50000, success: 1000 (1.0)\nTrue\n" ] ], [ [ "As you can see, the number of successful episodes generally increases as training progresses. It is not advisable to stop the first time that we observe 100% success over 1,000 episodes. There is a randomness to most games, so it is not likely that an agent would retain its 100% success rate with a new run. 
Once you observe that the agent has achieved 100% success for several update intervals, it might be safe to stop training.", "_____no_output_____" ], [ "# Running and Observing the Agent\n\nNow that the algorithm has trained the agent, we can observe it in action using the following code.", "_____no_output_____" ] ], [ [ "run_game(q_table, True, False)\nshow_video()", "_____no_output_____" ] ], [ [ "# Inspecting the Q-Table\n\nWe can also display the Q-table. The following code shows the action that the agent would perform for each environment state. Like the weights of a neural network, this table is not straightforward to interpret. Some patterns do emerge, however: preferred directions become apparent when we calculate the means of the rows and columns. The chosen actions appear fairly consistent within the upper and lower halves of both the velocity and position ranges.", "_____no_output_____" ] ], [ [ "import pandas as pd\n\ndf = pd.DataFrame(q_table.argmax(axis=2))\n\ndf.columns = [f'v-{x}' for x in range(DISCRETE_GRID_SIZE[0])]\ndf.index = [f'p-{x}' for x in range(DISCRETE_GRID_SIZE[1])]\ndf", "_____no_output_____" ], [ "df.mean(axis=0)", "_____no_output_____" ], [ "df.mean(axis=1)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ] ]
4a00ca15d09abc6430033a2b895b7099b60537b9
685,477
ipynb
Jupyter Notebook
_notebooks/2019-08-31-Regression-pricing.ipynb
emilearthur/fastblog
22ad83ec6e6cf85209bcf541a15d72e303efdcfb
[ "Apache-2.0" ]
null
null
null
_notebooks/2019-08-31-Regression-pricing.ipynb
emilearthur/fastblog
22ad83ec6e6cf85209bcf541a15d72e303efdcfb
[ "Apache-2.0" ]
null
null
null
_notebooks/2019-08-31-Regression-pricing.ipynb
emilearthur/fastblog
22ad83ec6e6cf85209bcf541a15d72e303efdcfb
[ "Apache-2.0" ]
null
null
null
375.603836
152,636
0.925752
[ [ [ "# BikeBuyer Regression\n", "_____no_output_____" ] ], [ [ "# importing libraries \nimport pandas as pd \nimport matplotlib.pyplot as plt \nimport seaborn as sns \nimport numpy as np \nimport numpy.random as nr \nimport math \n\n%matplotlib inline ", "_____no_output_____" ], [ "# loading data \ncustomer_info = pd.read_csv('Data/AdvWorksCusts.csv')\ncustomer_spending = pd.read_csv('Data/AW_AveMonthSpend.csv')\ncustomer_has_bike = pd.read_csv('Data/AW_BikeBuyer.csv')", "_____no_output_____" ], [ "# checking for duplicate and removing them \nprint(\"For customer_info: \" + \"\\n\")\nprint(customer_info.shape)\nprint(customer_info.CustomerID.unique().shape)\nprint(\"\\n\" + \"For customer_spending\" + \"\\n\")\nprint(customer_spending.shape)\nprint(customer_spending.CustomerID.unique().shape)\nprint(\"\\n\" + \"For customer_has_bike\" + \"\\n\")\nprint(customer_has_bike.shape)\nprint(customer_has_bike.CustomerID.unique().shape)\n\n# dropping duplicate if they exist \ncustomer_info.drop_duplicates(subset='CustomerID',keep='last', inplace=True)\ncustomer_spending.drop_duplicates(subset='CustomerID',keep='last',inplace=True)\ncustomer_has_bike.drop_duplicates(subset='CustomerID',keep='last',inplace=True)\n\n# checking if duplicate are dropped\nprint(\"For customer_info: \" + \"\\n\")\nprint(customer_info.shape)\nprint(customer_info.CustomerID.unique().shape)\nprint(\"\\n\" + \"For customer_spending\" + \"\\n\")\nprint(customer_spending.shape)\nprint(customer_spending.CustomerID.unique().shape)\nprint(\"\\n\" + \"For customer_has_bike\" + \"\\n\")\nprint(customer_has_bike.shape)\nprint(customer_has_bike.CustomerID.unique().shape)\n", "For customer_info: \n\n(16519, 23)\n(16404,)\n\nFor customer_spending\n\n(16519, 2)\n(16404,)\n\nFor customer_has_bike\n\n(16519, 2)\n(16404,)\nFor customer_info: \n\n(16404, 23)\n(16404,)\n\nFor customer_spending\n\n(16404, 2)\n(16404,)\n\nFor customer_has_bike\n\n(16404, 2)\n(16404,)\n" ], [ "# checking for null or missign values in all datasets \nprint((customer_info.astype(np.object).isnull()).any())\nprint((customer_spending.astype(np.object).isnull().any()))\nprint((customer_has_bike.astype(np.object).isnull().any()))", "CustomerID False\nTitle True\nFirstName False\nMiddleName True\nLastName False\nSuffix True\nAddressLine1 False\nAddressLine2 True\nCity False\nStateProvinceName False\nCountryRegionName False\nPostalCode False\nPhoneNumber False\nBirthDate False\nEducation False\nOccupation False\nGender False\nMaritalStatus False\nHomeOwnerFlag False\nNumberCarsOwned False\nNumberChildrenAtHome False\nTotalChildren False\nYearlyIncome False\ndtype: bool\nCustomerID False\nAveMonthSpend False\ndtype: bool\nCustomerID False\nBikeBuyer False\ndtype: bool\n" ] ], [ [ "Doing some exploratory analysis once the data is cleaned ", "_____no_output_____" ] ], [ [ "print(round(customer_info.describe(),2))\nprint(\"\\n\")\nprint(round(customer_spending.describe(),2))\nprint(\"\\n\")\nprint(round(customer_has_bike.describe(),2))", " CustomerID HomeOwnerFlag NumberCarsOwned NumberChildrenAtHome \\\ncount 16404.00 16404.00 16404.00 16404.00 \nmean 20240.14 0.67 1.50 0.99 \nstd 5340.37 0.47 1.14 1.51 \nmin 11000.00 0.00 0.00 0.00 \n25% 15617.75 0.00 1.00 0.00 \n50% 20231.50 1.00 2.00 0.00 \n75% 24862.25 1.00 2.00 2.00 \nmax 29482.00 1.00 4.00 5.00 \n\n TotalChildren YearlyIncome \ncount 16404.00 16404.00 \nmean 2.00 78129.67 \nstd 1.68 39728.38 \nmin 0.00 9482.00 \n25% 0.00 47808.75 \n50% 2.00 76125.00 \n75% 3.00 105211.75 \nmax 5.00 196511.00 \n\n\n CustomerID 
AveMonthSpend\ncount 16404.00 16404.00\nmean 20240.14 72.39\nstd 5340.37 27.27\nmin 11000.00 22.00\n25% 15617.75 52.00\n50% 20231.50 68.00\n75% 24862.25 84.00\nmax 29482.00 176.00\n\n\n CustomerID BikeBuyer\ncount 16404.00 16404.00\nmean 20240.14 0.33\nstd 5340.37 0.47\nmin 11000.00 0.00\n25% 15617.75 0.00\n50% 20231.50 0.00\n75% 24862.25 1.00\nmax 29482.00 1.00\n" ], [ "#merging data customer_info data and customer_spending data for modeling \ndata = customer_info.merge(customer_spending, on='CustomerID', how='left')\ndata.head()", "_____no_output_____" ] ], [ [ "#### Below function is useful but I prefer you use the other\n#### because its simple", "_____no_output_____" ] ], [ [ "from datetime import datetime \nfrom dateutil.parser import parse\n\ndef generate_age(data, format):\n collect_date = birthday = datetime(1998,1,1,0,0,0)\n age = [] \n for index, row in data.iterrows():\n cust_date = datetime.strptime(row['BirthDate'], format)\n age.append(int((collect_date - cust_date).days/365))\n return age \n", "_____no_output_____" ], [ "data['Age'] = generate_age(data, '%Y-%m-%d')\ndata[['BirthDate','Age']].head()", "_____no_output_____" ] ], [ [ "#### generating age since we given the birthrate \n#### This function for generating age work but it not safe since it does work with one form of format.\nfrom datetime import datetime \nfrom datetime import date\ndef calcute_age(age):\n cust_date = datetime.strptime(age, \"%Y-%m-%d\")\n f_date = date(1998,1,1)\n return f_date.year - cust_date.year - ((f_date.month, f_date.day) <(cust_date.month, cust_date.day))\ndata['Age'] = data['BirthDate'].apply(calcute_age)", "_____no_output_____" ], [ "data[['BirthDate','Age']].head()", "_____no_output_____" ] ], [ [ "def plot_scatter(auto_prices, cols, col_y= 'AveMonthSpend'):\n for col in cols:\n fig = plt.figure(figsize=(7,6)) # define plot area \n ax = fig.gca() # define axis \n auto_prices.plot.scatter(x= col, y=col_y, ax= ax)\n ax.set_title('Scatter plot of ' + col_y + ' vs. ' + col) #title of the plot\n ax.set_xlabel(col) #set x axis text\n ax.set_ylabel(col_y) #set y axis text\n plt.show()", "_____no_output_____" ], [ "cols=['NumberChildrenAtHome','NumberCarsOwned','TotalChildren']\nplot_scatter(data,cols)", "_____no_output_____" ], [ "cols= ['AveMonthSpend','YearlyIncome','Age']\nsns.pairplot(data[cols], palette=\"Set2\", diag_kind=\"kde\", size=2).map_upper(sns.kdeplot,cmap=\"Blues_d\")", "/usr/local/lib/python3.7/dist-packages/seaborn/axisgrid.py:2065: UserWarning: The `size` parameter has been renamed to `height`; pleaes update your code.\n warnings.warn(msg, UserWarning)\n/usr/lib/python3/dist-packages/scipy/stats/stats.py:1713: FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of `arr[seq]`. 
In the future this will be interpreted as an array index, `arr[np.array(seq)]`, which will result either in an error or a different result.\n return np.add.reduce(sorted[indexer] * weights, axis=axis) / sumval\n" ], [ "def plot_box(auto_prices, cols, col_y='AveMonthSpend'):\n for col in cols:\n sns.set_style(\"whitegrid\")\n sns.boxplot(col,col_y, data=auto_prices) \n plt.xlabel(col) #set x axis text\n plt.ylabel(col_y) #set y axis text \n plt.show()", "_____no_output_____" ], [ "cols= ['Occupation','Gender','MaritalStatus','HomeOwnerFlag']\nplot_box(data, cols)", "_____no_output_____" ] ], [ [ "After visualizations above we selected the following features for model: Gender, MaritalStatus, HomeOwnerFlag, Occupation, Age, YearlyIncme and NumberChildrenAtHome", "_____no_output_____" ] ], [ [ "# Grouping Categorical and numerical data \ncategorical_features= ['Gender','MaritalStatus','HomeOwnerFlag','Occupation']\nnumeric_features= ['Age','YearlyIncome','NumberChildrenAtHome']", "_____no_output_____" ], [ "# define encoder for categorical \nfrom sklearn import preprocessing\nimport sklearn.model_selection as ms \nfrom sklearn import linear_model\nimport sklearn.metrics as sklm\n\ndef encode_string(cat_features):\n enc= preprocessing.LabelEncoder()\n enc.fit(cat_features)\n enc_cat_features= enc.transform(cat_features)\n ohe= preprocessing.OneHotEncoder()\n encoded= ohe.fit(enc_cat_features.reshape(-1,1))\n return encoded.transform(enc_cat_features.reshape(-1,1)).toarray()", "_____no_output_____" ], [ "def encode_cat_features(features):\n categorical_features= ['Gender','MaritalStatus','HomeOwnerFlag']\n f= encode_string(features['Occupation'])\n for cat in categorical_features:\n enc= encode_string(features[cat])\n f= np.concatenate([f,enc],1)\n return f", "_____no_output_____" ], [ "labels = np.array(data.AveMonthSpend)\nselected = numeric_features + categorical_features\nfeatures = data[selected]\nprint(labels)\nprint(features.head())", "[ 89 117 123 ... 79 65 68]\n Age YearlyIncome NumberChildrenAtHome Gender MaritalStatus \\\n0 31 137947 0 M M \n1 32 101141 3 M S \n2 32 91945 3 M M \n3 29 86688 0 F S \n4 29 92771 5 F S \n\n HomeOwnerFlag Occupation \n0 1 Professional \n1 0 Professional \n2 1 Professional \n3 0 Professional \n4 1 Professional \n" ], [ "#encoding features \nencoded_features= encode_cat_features(features)\nprint(encoded_features[:,:])\n#selecting numeric features and converting them to array\nnumeric_features= np.array(data[numeric_features])\nprint(numeric_features[:,:])", "[[0. 0. 0. ... 0. 0. 1.]\n [0. 0. 0. ... 1. 1. 0.]\n [0. 0. 0. ... 0. 0. 1.]\n ...\n [0. 1. 0. ... 0. 0. 1.]\n [0. 0. 0. ... 0. 0. 1.]\n [0. 0. 0. ... 1. 0. 
1.]]\n[[ 31 137947 0]\n [ 32 101141 3]\n [ 32 91945 3]\n ...\n [ 58 133053 0]\n [ 51 31930 0]\n [ 52 59382 0]]\n" ], [ "# Combining numeric and encoded features into 1 feature \nfeatures= np.concatenate([encoded_features,numeric_features],1)\nprint(features.shape)\nprint(features[:1,:])", "(16404, 14)\n[[0.00000e+00 0.00000e+00 0.00000e+00 1.00000e+00 0.00000e+00 0.00000e+00\n 1.00000e+00 1.00000e+00 0.00000e+00 0.00000e+00 1.00000e+00 3.10000e+01\n 1.37947e+05 0.00000e+00]]\n" ], [ "# spliting data into training and test datasets \nnr.seed(9988)\nindx= range(features.shape[0])\nindx= ms.train_test_split(indx, test_size= 300)\nX_train= features[indx[0],:]\ny_train= np.ravel(labels[indx[0]])\nX_test= features[indx[1],:]\ny_test= np.ravel(labels[indx[1]])", "_____no_output_____" ], [ "\n# Scaling the data to avoid features having different magnitudes \n#scalar= preprocessing.MinMaxScaler(feature_range=(-1,1)).fit(X_train[:,11:])\nscaler = preprocessing.StandardScaler().fit(X_train[:,11:13])\nX_train[:,11:13] = scaler.transform(X_train[:,11:13])\nX_test[:,11:13] = scaler.transform(X_test[:,11:13])\nX_train[:2]", "_____no_output_____" ] ], [ [ "Now Features are prepared we try it on models ", "_____no_output_____" ] ], [ [ "# using the linear regression model to define and fit", "_____no_output_____" ], [ "lin_mod= linear_model.Ridge(alpha = 0.05)\nlin_mod.fit(X_train,y_train)\nprint(lin_mod.intercept_)\nprint(lin_mod.coef_)", "60.84370494780248\n[ 6.62453133e-01 -2.17172381e+00 -1.79250561e-01 8.84114274e-01\n 8.04406961e-01 -1.40857718e+01 1.40857718e+01 3.09843371e+00\n -3.09843371e+00 3.71975025e-03 -3.71975028e-03 -1.24172260e+00\n 8.20152838e+00 1.10719302e+01]\n" ], [ "# tunning the model to fine the best alpha \nalphas = np.array([0.1,0.01,0.001,0.0001,0,0.01,0.05,0.04,0.03,0.02,1,2,3,4,5,6,7,8,9,10])\nlin_mod= linear_model.Ridge()\nlinRidge_clf = ms.GridSearchCV(estimator=lin_mod, param_grid=dict(alpha=alphas))\nlinRidge_clf.fit(X_train,y_train)\n#summarize results of grid search \nprint(linRidge_clf.best_score_)\nprint(linRidge_clf.best_estimator_.alpha)", "/usr/local/lib/python3.7/dist-packages/sklearn/model_selection/_split.py:2053: FutureWarning: You should specify a value for 'cv' instead of relying on the default value. 
The default value will change from 3 to 5 in version 0.22.\n warnings.warn(CV_WARNING, FutureWarning)\n" ], [ "# fitting the alpa value into the model.\nlin_mod= linear_model.Ridge(alpha = 3.0)\nlin_mod.fit(X_train,y_train)\nprint(lin_mod.intercept_)\nprint(lin_mod.coef_)", "60.84421760308023\n[ 6.59621956e-01 -2.16533531e+00 -1.82820198e-01 8.85518088e-01\n 8.03015464e-01 -1.40810939e+01 1.40810939e+01 3.09746333e+00\n -3.09746333e+00 3.24850901e-03 -3.24850901e-03 -1.24213121e+00\n 8.19896041e+00 1.10711523e+01]\n" ], [ "# function to calcuclate the matrices\ndef print_metrics(y_true, y_predicted):\n # compute R^2 and the adjusted R^2\n r2= sklm.r2_score(y_true,y_predicted)\n n= X_test.shape[0]\n p= X_test.shape[1]-1\n r2_adj= 1-(1-r2)*((n-1)/(n-p-1))\n ## Print the usual metrics and the R^2 values\n print('Mean Square Error = ' + str(sklm.mean_squared_error(y_true, y_predicted)))\n print('Root Mean Square Error = ' + str(math.sqrt(sklm.mean_squared_error(y_true, y_predicted))))\n print('Mean Absolute Error = ' + str(sklm.mean_absolute_error(y_true, y_predicted)))\n print('Median Absolute Error = ' + str(sklm.median_absolute_error(y_true, y_predicted)))\n print('R^2 = ' + str(r2))\n print('Adjusted R^2 = ' + str(r2_adj)) ", "_____no_output_____" ], [ "# function to calculate accuracy \ndef print_evalute(y_true_, y_predicted_):\n errors= abs(y_predicted_ - y_true_)\n mape_= 100* np.mean(errors/y_true_)\n accuracy= 100 - mape_\n print('Model Performance')\n print('Average Error: {:0.4f} degrees.'.format(np.mean(errors)))\n print('Accuracy= {:0.2f}%.'.format(accuracy))\n ", "_____no_output_____" ], [ "# predict and run metric \nscores= lin_mod.predict(X_test) \nprint_metrics(y_test, scores)\nprint_evalute(y_test, scores)", "Mean Square Error = 34.300889201469865\nRoot Mean Square Error = 5.856696099463405\nMean Absolute Error = 4.394083754414235\nMedian Absolute Error = 3.3884315387464348\nR^2 = 0.9522663978047706\nAdjusted R^2 = 0.9500966886140784\nModel Performance\nAverage Error: 4.3941 degrees.\nAccuracy= 93.28%.\n" ], [ "# function to compute for the residuals \ndef hist_residue(y_test, y_score):\n ## compute vector of residuals\n residue = np.subtract(y_test.reshape(-1,1), y_score.reshape(-1,1))\n # making a plot \n sns.distplot(residue)\n plt.title('Histogram of residuals')\n plt.xlabel('Residual value')\n plt.ylabel('Count')\n plt.show()\n \nhist_residue(y_test,scores)", "/usr/lib/python3/dist-packages/scipy/stats/stats.py:1713: FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of `arr[seq]`. In the future this will be interpreted as an array index, `arr[np.array(seq)]`, which will result either in an error or a different result.\n return np.add.reduce(sorted[indexer] * weights, axis=axis) / sumval\n" ], [ "def plot_residue(y_test, y_score):\n ## compute vector of residuals\n residue = np.subtract(y_test.reshape(-1,1), y_score.reshape(-1,1))\n # making a plot \n sns.regplot(y_score, residue, fit_reg= False)\n plt.title('Residuals vs Predicted values')\n plt.xlabel('Predicted Values')\n plt.ylabel('Residuals')\n plt.show()\n\nplot_residue(y_test,scores)\n ", "_____no_output_____" ] ], [ [ "The residual are not normally distrubuted as expected. Also there is a pattern for lower Average residuals. This indicate the model is not generalize as expected. ", "_____no_output_____" ] ], [ [ "# using the polynomial regression to define and fit. 
\nfrom sklearn.pipeline import make_pipeline\nfrom sklearn.preprocessing import PolynomialFeatures\n\npoly_mod= make_pipeline(PolynomialFeatures(4),\n linear_model.LinearRegression())\npoly_mod.fit(X_train,y_train)\nscores = poly_mod.predict(X_test)\nprint_metrics(y_test,scores)\nprint_evalute(y_test, scores)\nhist_residue(y_test,scores)\nplot_residue(y_test,scores)", "Mean Square Error = 7.956054222480307\nRoot Mean Square Error = 2.8206478373735893\nMean Absolute Error = 2.2826290766398114\nMedian Absolute Error = 2.027923583984375\nR^2 = 0.9889282424992272\nAdjusted R^2 = 0.9884249807946466\nModel Performance\nAverage Error: 2.2826 degrees.\nAccuracy= 96.32%.\n" ] ], [ [ "Comparing the polynomial feature to the linear regression. It can be seen that polynomial regression performs beter. The R2 and adj. R2 shows a good residual distrubution and also the histogram shows a form of a normal distribution. \nDue to this I will expore other model to see how best it goes.", "_____no_output_____" ] ], [ [ "# Running the gridCV for the GradientBoostingRegessor\n# to choose the best parameter for the GradientBoostingRegressor\nfrom sklearn.ensemble import GradientBoostingRegressor\ngbrt_mod= GradientBoostingRegressor(random_state=0)\nparam_grid= {\n 'n_estimators': [10,20,30,40,50,100,200,300,500],\n 'max_features': ['auto'],\n 'max_depth': [1,2,4,6,8,10],\n 'learning_rate': [0.1],\n 'subsample': [1]\n}\ngbrt_clf= ms.GridSearchCV(estimator=gbrt_mod, \n param_grid=param_grid,\n n_jobs=4,\n cv=5,\n scoring='neg_mean_squared_error')\ngbrt_clf.fit(X_train,y_train)\nprint(gbrt_clf.best_score_)\nprint(gbrt_clf.best_params_)", "-9.746757215191346\n{'learning_rate': 0.1, 'max_depth': 4, 'max_features': 'auto', 'n_estimators': 200, 'subsample': 1}\n" ], [ "# Using the GradientBoostingRegessor Tree \nfrom sklearn.ensemble import GradientBoostingRegressor\ngbrt_mod= GradientBoostingRegressor(n_estimators=200,\n max_depth=4)\ngbrt_mod.fit(X_train,y_train)\nscores= gbrt_mod.predict(X_test)\nprint_metrics(y_test,scores)\nprint_evalute(y_test, scores)\nhist_residue(y_test,scores)\nplot_residue(y_test,scores)", "Mean Square Error = 9.259138303535538\nRoot Mean Square Error = 3.042883222132512\nMean Absolute Error = 2.4327966433305996\nMedian Absolute Error = 2.119264615257041\nR^2 = 0.9871148522752397\nAdjusted R^2 = 0.986529163742296\nModel Performance\nAverage Error: 2.4328 degrees.\nAccuracy= 96.11%.\n" ], [ "# Using Neural network \nfrom sklearn.neural_network import MLPRegressor\nregressor_mod= MLPRegressor(hidden_layer_sizes= (100,),\n activation= 'tanh',\n learning_rate= 'adaptive',\n max_iter=1000,\n random_state=9,\n learning_rate_init=0.001)\nregressor_mod.fit(X_train, y_train)\nscores= regressor_mod.predict(X_test)\nprint_metrics(y_test,scores)\nprint_evalute(y_test, scores)\nhist_residue(y_test,scores)\nplot_residue(y_test,scores)", "Mean Square Error = 8.100806569138946\nRoot Mean Square Error = 2.8461915903780874\nMean Absolute Error = 2.296191108436508\nMedian Absolute Error = 2.1082752324486655\nR^2 = 0.9887268030877984\nAdjusted R^2 = 0.9882143850463346\nModel Performance\nAverage Error: 2.2962 degrees.\nAccuracy= 96.32%.\n" ], [ "# Using Random Forest \nfrom sklearn.ensemble import RandomForestRegressor\nrf_Regressor_mod= RandomForestRegressor(n_estimators=40)\nrf_Regressor_mod.fit(X_train, y_train)\nscores= rf_Regressor_mod.predict(X_test)\nprint_metrics(y_test,scores)\nprint_evalute(y_test, scores)\nhist_residue(y_test,scores)\nplot_residue(y_test,scores)", "Mean Square Error = 
11.499512500000002\nRoot Mean Square Error = 3.391093112847243\nMean Absolute Error = 2.744166666666666\nMedian Absolute Error = 2.400000000000002\nR^2 = 0.9839971158797088\nAdjusted R^2 = 0.9832697120560593\nModel Performance\nAverage Error: 2.7442 degrees.\nAccuracy= 95.66%.\n" ], [ "# tunning Random Forest Regressor to get the best \n# parameters \nn_estimators= [int(x) for x in np.linspace(10,500,10)]# # trees in random forest\nmax_features= ['auto','sqrt']# # features to consider at every split\nmax_depth= [int(x) for x in np.linspace(10,100,10)]# # maximum number of levels in tree\nmax_depth.append(None)\nmin_samples_split= [2,5,10] # minimum # samples required at each split a node\nmin_samples_leaf= [1,2,4] # minimum # of samples required at each leaf node\nbootstrap= [True, False] # Method of selecting sample for training each tree \n\nparam_distributions= {'n_estimators': n_estimators,\n 'max_features': max_features,\n 'max_depth': max_depth,\n 'min_samples_split': min_samples_split,\n 'min_samples_leaf': min_samples_leaf,\n 'bootstrap': bootstrap}\n\nrf_Regressor_mod= RandomForestRegressor()\nrf_Regressor_clf= ms.RandomizedSearchCV(estimator= rf_Regressor_mod,\n param_distributions= param_distributions,\n n_iter= 100,\n cv=3,\n random_state=42,\n n_jobs=-1)\nrf_Regressor_clf.fit(X_train,y_train)\nprint('\\n')\nprint(rf_Regressor_clf.best_score_)\nprint(rf_Regressor_clf.best_params_)", "/usr/local/lib/python3.7/dist-packages/sklearn/externals/joblib/externals/loky/process_executor.py:706: UserWarning: A worker stopped while some jobs were given to the executor. This can be caused by a too short worker timeout or by a memory leak.\n \"timeout or by a memory leak.\", UserWarning\n/usr/local/lib/python3.7/dist-packages/sklearn/externals/joblib/externals/loky/process_executor.py:706: UserWarning: A worker stopped while some jobs were given to the executor. This can be caused by a too short worker timeout or by a memory leak.\n \"timeout or by a memory leak.\", UserWarning\n/usr/local/lib/python3.7/dist-packages/sklearn/externals/joblib/externals/loky/process_executor.py:706: UserWarning: A worker stopped while some jobs were given to the executor. This can be caused by a too short worker timeout or by a memory leak.\n \"timeout or by a memory leak.\", UserWarning\n/usr/local/lib/python3.7/dist-packages/sklearn/externals/joblib/externals/loky/process_executor.py:706: UserWarning: A worker stopped while some jobs were given to the executor. This can be caused by a too short worker timeout or by a memory leak.\n \"timeout or by a memory leak.\", UserWarning\n/usr/local/lib/python3.7/dist-packages/sklearn/externals/joblib/externals/loky/process_executor.py:706: UserWarning: A worker stopped while some jobs were given to the executor. This can be caused by a too short worker timeout or by a memory leak.\n \"timeout or by a memory leak.\", UserWarning\n/usr/local/lib/python3.7/dist-packages/sklearn/externals/joblib/externals/loky/process_executor.py:706: UserWarning: A worker stopped while some jobs were given to the executor. This can be caused by a too short worker timeout or by a memory leak.\n \"timeout or by a memory leak.\", UserWarning\n/usr/local/lib/python3.7/dist-packages/sklearn/externals/joblib/externals/loky/process_executor.py:706: UserWarning: A worker stopped while some jobs were given to the executor. 
This can be caused by a too short worker timeout or by a memory leak.\n \"timeout or by a memory leak.\", UserWarning\n/usr/local/lib/python3.7/dist-packages/sklearn/externals/joblib/externals/loky/process_executor.py:706: UserWarning: A worker stopped while some jobs were given to the executor. This can be caused by a too short worker timeout or by a memory leak.\n \"timeout or by a memory leak.\", UserWarning\n/usr/local/lib/python3.7/dist-packages/sklearn/externals/joblib/externals/loky/process_executor.py:706: UserWarning: A worker stopped while some jobs were given to the executor. This can be caused by a too short worker timeout or by a memory leak.\n \"timeout or by a memory leak.\", UserWarning\n" ], [ "from sklearn.ensemble import RandomForestRegressor\nrf_Regressor_mod= RandomForestRegressor(n_estimators= 227,\n min_samples_split= 5,\n min_samples_leaf= 1,\n max_features= 'auto',\n max_depth= 10,\n bootstrap= 'True')\nrf_Regressor_mod.fit(X_train, y_train)\nscores= rf_Regressor_mod.predict(X_test)\nprint_metrics(y_test,scores)\nprint_evalute(y_test, scores)\nhist_residue(y_test,scores)\nplot_residue(y_test,scores)", "Mean Square Error = 9.571726313153945\nRoot Mean Square Error = 3.093820665965296\nMean Absolute Error = 2.4688942994223027\nMedian Absolute Error = 2.1969817511908794\nR^2 = 0.9866798503831755\nAdjusted R^2 = 0.9860743890369562\nModel Performance\nAverage Error: 2.4689 degrees.\nAccuracy= 96.04%.\n" ] ], [ [ "From all the models, it could be seen that ML regressor does good in general compared to the other models. ", "_____no_output_____" ] ], [ [ "# Testing the model on final test data \n# importing the final test data\nfinal= pd.read_csv('Data/AW_test.csv')\n\n# checking if there are duplicate \nprint(final.shape)\nprint(final.CustomerID.unique().shape)\n", "(500, 23)\n(500,)\n" ], [ "# calculate for age in age \nfinal['Age'] = generate_age(final,'%m/%d/%Y')\nfinal[['Age','BirthDate']].head()", "_____no_output_____" ], [ "encoded = encode_cat_features(final)\nnumeric_final_features = np.array(final[['Age','YearlyIncome', 'NumberChildrenAtHome']])\nfinal_test = np.concatenate([encoded,numeric_final_features], 1)\nfinal_test[:,11:13]= scaler.transform(final_test[:,11:13])", "/usr/local/lib/python3.7/dist-packages/sklearn/preprocessing/_encoders.py:368: FutureWarning: The handling of integer data will change in version 0.22. Currently, the categories are determined based on the range [0, max(values)], while in the future they will be determined based on the unique values.\nIf you want the future behaviour and silence this warning, you can specify \"categories='auto'\".\nIn case you used a LabelEncoder before this OneHotEncoder to convert the categories to integers, then you can now use the OneHotEncoder directly.\n warnings.warn(msg, FutureWarning)\n/usr/local/lib/python3.7/dist-packages/sklearn/preprocessing/_encoders.py:368: FutureWarning: The handling of integer data will change in version 0.22. 
Currently, the categories are determined based on the range [0, max(values)], while in the future they will be determined based on the unique values.\nIf you want the future behaviour and silence this warning, you can specify \"categories='auto'\".\nIn case you used a LabelEncoder before this OneHotEncoder to convert the categories to integers, then you can now use the OneHotEncoder directly.\n warnings.warn(msg, FutureWarning)\n/usr/local/lib/python3.7/dist-packages/sklearn/preprocessing/_encoders.py:368: FutureWarning: The handling of integer data will change in version 0.22. Currently, the categories are determined based on the range [0, max(values)], while in the future they will be determined based on the unique values.\nIf you want the future behaviour and silence this warning, you can specify \"categories='auto'\".\nIn case you used a LabelEncoder before this OneHotEncoder to convert the categories to integers, then you can now use the OneHotEncoder directly.\n warnings.warn(msg, FutureWarning)\n/usr/local/lib/python3.7/dist-packages/sklearn/preprocessing/_encoders.py:368: FutureWarning: The handling of integer data will change in version 0.22. Currently, the categories are determined based on the range [0, max(values)], while in the future they will be determined based on the unique values.\nIf you want the future behaviour and silence this warning, you can specify \"categories='auto'\".\nIn case you used a LabelEncoder before this OneHotEncoder to convert the categories to integers, then you can now use the OneHotEncoder directly.\n warnings.warn(msg, FutureWarning)\n" ], [ "final_scores= regressor_mod.predict(final_test)\nnp.savetxt('final_answer_regression.csv', final_scores, delimiter=',',fmt='%i')", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ] ]
4a00caeefe2e25ea1f3a52e0af765ae2b45a3027
19,933
ipynb
Jupyter Notebook
Python-Data-Science-and-Machine-Learning-Bootcamp/Python-for-Data-Analysis/NumPy/Numpy Exercise .ipynb
ankit1601/PythonDataScience
360a49bfd1183d375f1a0a408ecc9f2efe55590d
[ "Apache-2.0" ]
null
null
null
Python-Data-Science-and-Machine-Learning-Bootcamp/Python-for-Data-Analysis/NumPy/Numpy Exercise .ipynb
ankit1601/PythonDataScience
360a49bfd1183d375f1a0a408ecc9f2efe55590d
[ "Apache-2.0" ]
null
null
null
Python-Data-Science-and-Machine-Learning-Bootcamp/Python-for-Data-Analysis/NumPy/Numpy Exercise .ipynb
ankit1601/PythonDataScience
360a49bfd1183d375f1a0a408ecc9f2efe55590d
[ "Apache-2.0" ]
null
null
null
20.872251
166
0.43596
[ [ [ "___\n\n<a href='http://www.pieriandata.com'> <img src='../Pierian_Data_Logo.png' /></a>\n___", "_____no_output_____" ], [ "# NumPy Exercises \n\nNow that we've learned about NumPy let's test your knowledge. We'll start off with a few simple tasks, and then you'll be asked some more complicated questions.", "_____no_output_____" ], [ "#### Import NumPy as np", "_____no_output_____" ] ], [ [ "import numpy as np", "_____no_output_____" ], [ "#", "_____no_output_____" ] ], [ [ "#### Create an array of 10 zeros ", "_____no_output_____" ] ], [ [ "np.zeros(10)", "_____no_output_____" ] ], [ [ "#### Create an array of 10 ones", "_____no_output_____" ] ], [ [ "np.ones(10)", "_____no_output_____" ], [ "#", "_____no_output_____" ] ], [ [ "#### Create an array of 10 fives", "_____no_output_____" ] ], [ [ "np.ones(10)*5", "_____no_output_____" ] ], [ [ "#### Create an array of the integers from 10 to 50", "_____no_output_____" ] ], [ [ "np.arange(10,51)", "_____no_output_____" ] ], [ [ "#### Create an array of all the even integers from 10 to 50", "_____no_output_____" ] ], [ [ "np.arange(10,51,2)", "_____no_output_____" ] ], [ [ "#### Create a 3x3 matrix with values ranging from 0 to 8", "_____no_output_____" ] ], [ [ "np.arange(0,9).reshape(3,3)", "_____no_output_____" ] ], [ [ "#### Create a 3x3 identity matrix", "_____no_output_____" ] ], [ [ "np.eye(3)", "_____no_output_____" ] ], [ [ "#### Use NumPy to generate a random number between 0 and 1", "_____no_output_____" ] ], [ [ "np.random.rand(1)", "_____no_output_____" ] ], [ [ "#### Use NumPy to generate an array of 25 random numbers sampled from a standard normal distribution", "_____no_output_____" ] ], [ [ "np.random.randn(25)", "_____no_output_____" ] ], [ [ "#### Create the following matrix:", "_____no_output_____" ] ], [ [ "np.arange(1,101).reshape(10,10)/100 #np.linespace(0.01,1,100).reshape(10,10)", "_____no_output_____" ] ], [ [ "#### Create an array of 20 linearly spaced points between 0 and 1:", "_____no_output_____" ] ], [ [ "np.linspace(0,1,20)", "_____no_output_____" ] ], [ [ "## Numpy Indexing and Selection\n\nNow you will be given a few matrices, and be asked to replicate the resulting matrix outputs:", "_____no_output_____" ] ], [ [ "mat = np.arange(1,26).reshape(5,5)\nmat", "_____no_output_____" ], [ "# WRITE CODE HERE THAT REPRODUCES THE OUTPUT OF THE CELL BELOW\n# BE CAREFUL NOT TO RUN THE CELL BELOW, OTHERWISE YOU WON'T\n# BE ABLE TO SEE THE OUTPUT ANY MORE\nmat[2:,1:]", "_____no_output_____" ], [ "# WRITE CODE HERE THAT REPRODUCES THE OUTPUT OF THE CELL BELOW\n# BE CAREFUL NOT TO RUN THE CELL BELOW, OTHERWISE YOU WON'T\n# BE ABLE TO SEE THE OUTPUT ANY MORE\nmat[3,4]", "_____no_output_____" ], [ "# WRITE CODE HERE THAT REPRODUCES THE OUTPUT OF THE CELL BELOW\n# BE CAREFUL NOT TO RUN THE CELL BELOW, OTHERWISE YOU WON'T\n# BE ABLE TO SEE THE OUTPUT ANY MORE\nmat[0:3,1:2]", "_____no_output_____" ], [ "# WRITE CODE HERE THAT REPRODUCES THE OUTPUT OF THE CELL BELOW\n# BE CAREFUL NOT TO RUN THE CELL BELOW, OTHERWISE YOU WON'T\n# BE ABLE TO SEE THE OUTPUT ANY MORE\nmat[4]", "_____no_output_____" ], [ "# WRITE CODE HERE THAT REPRODUCES THE OUTPUT OF THE CELL BELOW\n# BE CAREFUL NOT TO RUN THE CELL BELOW, OTHERWISE YOU WON'T\n# BE ABLE TO SEE THE OUTPUT ANY MORE\nmat[3:5]", "_____no_output_____" ] ], [ [ "### Now do the following", "_____no_output_____" ], [ "#### Get the sum of all the values in mat", "_____no_output_____" ] ], [ [ "mat.sum()", "_____no_output_____" ] ], [ [ "#### Get the standard deviation of the values in 
mat", "_____no_output_____" ] ], [ [ "mat.std()", "_____no_output_____" ] ], [ [ "#### Get the sum of all the columns in mat", "_____no_output_____" ] ], [ [ "mat.sum(axis=0)", "_____no_output_____" ] ], [ [ "# Great Job!", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]